From 4d107cb87e534332b10ddfc430fcd3716c00971f Mon Sep 17 00:00:00 2001 From: Hein Date: Sat, 11 Apr 2026 23:37:53 +0200 Subject: [PATCH 01/15] feat(tools): add background embedding queue for thoughts * Implement QueueThought method in BackfillTool for embedding generation * Update CaptureTool to utilize embedding queuer for failed embeddings * Add EmbeddingStatus field to Thought type for tracking embedding state --- internal/app/app.go | 5 +++-- internal/store/thoughts.go | 6 ++++++ internal/tools/backfill.go | 24 ++++++++++++++++++++++++ internal/tools/capture.go | 22 +++++++++++++++++++--- internal/types/thought.go | 17 +++++++++-------- 5 files changed, 61 insertions(+), 13 deletions(-) diff --git a/internal/app/app.go b/internal/app/app.go index df3a310..bd15e6c 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -163,9 +163,10 @@ func routes(logger *slog.Logger, cfg *config.Config, info buildinfo.Info, db *st authMiddleware := auth.Middleware(cfg.Auth, keyring, oauthRegistry, tokenStore, accessTracker, logger) filesTool := tools.NewFilesTool(db, activeProjects) metadataRetryer := tools.NewMetadataRetryer(context.Background(), db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, logger) + backfillTool := tools.NewBackfillTool(db, provider, activeProjects, logger) toolSet := mcpserver.ToolSet{ - Capture: tools.NewCaptureTool(db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, metadataRetryer, logger), + Capture: tools.NewCaptureTool(db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, metadataRetryer, backfillTool, logger), Search: tools.NewSearchTool(db, provider, cfg.Search, activeProjects), List: tools.NewListTool(db, cfg.Search, activeProjects), Stats: tools.NewStatsTool(db), @@ -180,7 +181,7 @@ func routes(logger *slog.Logger, cfg *config.Config, info buildinfo.Info, db *st Summarize: tools.NewSummarizeTool(db, provider, cfg.Search, activeProjects), Links: tools.NewLinksTool(db, provider, cfg.Search), 
Files: filesTool, - Backfill: tools.NewBackfillTool(db, provider, activeProjects, logger), + Backfill: backfillTool, Reparse: tools.NewReparseMetadataTool(db, provider, cfg.Capture, activeProjects, logger), RetryMetadata: tools.NewRetryMetadataTool(metadataRetryer), Maintenance: tools.NewMaintenanceTool(db), diff --git a/internal/store/thoughts.go b/internal/store/thoughts.go index 2c18df4..1819a2e 100644 --- a/internal/store/thoughts.go +++ b/internal/store/thoughts.go @@ -58,6 +58,12 @@ func (db *DB) InsertThought(ctx context.Context, thought thoughttypes.Thought, e return thoughttypes.Thought{}, fmt.Errorf("commit thought insert: %w", err) } + if len(thought.Embedding) > 0 { + created.EmbeddingStatus = "done" + } else { + created.EmbeddingStatus = "pending" + } + return created, nil } diff --git a/internal/tools/backfill.go b/internal/tools/backfill.go index c92d9c7..521a9b7 100644 --- a/internal/tools/backfill.go +++ b/internal/tools/backfill.go @@ -51,6 +51,30 @@ func NewBackfillTool(db *store.DB, provider ai.Provider, sessions *session.Activ return &BackfillTool{store: db, provider: provider, sessions: sessions, logger: logger} } +// QueueThought queues a single thought for background embedding generation. +// It is used by capture when the embedding provider is temporarily unavailable. 
+func (t *BackfillTool) QueueThought(ctx context.Context, id uuid.UUID, content string) { + go func() { + vec, err := t.provider.Embed(ctx, content) + if err != nil { + t.logger.Warn("background embedding retry failed", + slog.String("thought_id", id.String()), + slog.String("error", err.Error()), + ) + return + } + model := t.provider.EmbeddingModel() + if err := t.store.UpsertEmbedding(ctx, id, model, vec); err != nil { + t.logger.Warn("background embedding upsert failed", + slog.String("thought_id", id.String()), + slog.String("error", err.Error()), + ) + return + } + t.logger.Info("background embedding retry succeeded", slog.String("thought_id", id.String())) + }() +} + func (t *BackfillTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in BackfillInput) (*mcp.CallToolResult, BackfillOutput, error) { limit := in.Limit if limit <= 0 { diff --git a/internal/tools/capture.go b/internal/tools/capture.go index bd90bba..7483ec0 100644 --- a/internal/tools/capture.go +++ b/internal/tools/capture.go @@ -6,6 +6,7 @@ import ( "strings" "time" + "github.com/google/uuid" "github.com/modelcontextprotocol/go-sdk/mcp" "golang.org/x/sync/errgroup" @@ -17,6 +18,11 @@ import ( thoughttypes "git.warky.dev/wdevs/amcs/internal/types" ) +// EmbeddingQueuer queues a thought for background embedding generation. 
+type EmbeddingQueuer interface { + QueueThought(ctx context.Context, id uuid.UUID, content string) +} + type CaptureTool struct { store *store.DB provider ai.Provider @@ -24,6 +30,7 @@ type CaptureTool struct { sessions *session.ActiveProjects metadataTimeout time.Duration retryer *MetadataRetryer + embedRetryer EmbeddingQueuer log *slog.Logger } @@ -36,8 +43,8 @@ type CaptureOutput struct { Thought thoughttypes.Thought `json:"thought"` } -func NewCaptureTool(db *store.DB, provider ai.Provider, capture config.CaptureConfig, metadataTimeout time.Duration, sessions *session.ActiveProjects, retryer *MetadataRetryer, log *slog.Logger) *CaptureTool { - return &CaptureTool{store: db, provider: provider, capture: capture, sessions: sessions, metadataTimeout: metadataTimeout, retryer: retryer, log: log} +func NewCaptureTool(db *store.DB, provider ai.Provider, capture config.CaptureConfig, metadataTimeout time.Duration, sessions *session.ActiveProjects, retryer *MetadataRetryer, embedRetryer EmbeddingQueuer, log *slog.Logger) *CaptureTool { + return &CaptureTool{store: db, provider: provider, capture: capture, sessions: sessions, metadataTimeout: metadataTimeout, retryer: retryer, embedRetryer: embedRetryer, log: log} } func (t *CaptureTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in CaptureInput) (*mcp.CallToolResult, CaptureOutput, error) { @@ -54,12 +61,18 @@ func (t *CaptureTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in C var embedding []float32 rawMetadata := metadata.Fallback(t.capture) metadataNeedsRetry := false + embeddingNeedsRetry := false group, groupCtx := errgroup.WithContext(ctx) group.Go(func() error { vector, err := t.provider.Embed(groupCtx, content) if err != nil { - return err + t.log.Warn("embedding failed, thought will be saved without embedding", + slog.String("provider", t.provider.Name()), + slog.String("error", err.Error()), + ) + embeddingNeedsRetry = true + return nil } embedding = vector return nil @@ -106,6 +119,9 
@@ func (t *CaptureTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in C if metadataNeedsRetry && t.retryer != nil { t.retryer.QueueThought(created.ID) } + if embeddingNeedsRetry && t.embedRetryer != nil { + t.embedRetryer.QueueThought(ctx, created.ID, content) + } return nil, CaptureOutput{Thought: created}, nil } diff --git a/internal/types/thought.go b/internal/types/thought.go index 3a68021..e122e29 100644 --- a/internal/types/thought.go +++ b/internal/types/thought.go @@ -52,14 +52,15 @@ type StoredFileFilter struct { } type Thought struct { - ID uuid.UUID `json:"id"` - Content string `json:"content"` - Embedding []float32 `json:"embedding,omitempty"` - Metadata ThoughtMetadata `json:"metadata"` - ProjectID *uuid.UUID `json:"project_id,omitempty"` - ArchivedAt *time.Time `json:"archived_at,omitempty"` - CreatedAt time.Time `json:"created_at"` - UpdatedAt time.Time `json:"updated_at"` + ID uuid.UUID `json:"id"` + Content string `json:"content"` + Embedding []float32 `json:"embedding,omitempty"` + EmbeddingStatus string `json:"embedding_status,omitempty"` + Metadata ThoughtMetadata `json:"metadata"` + ProjectID *uuid.UUID `json:"project_id,omitempty"` + ArchivedAt *time.Time `json:"archived_at,omitempty"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` } type SearchResult struct { From b6e156011f2a34349cbfa3fc91eeb9ab86b973b7 Mon Sep 17 00:00:00 2001 From: sgcommand Date: Mon, 13 Apr 2026 23:04:11 +0200 Subject: [PATCH 02/15] Improve thought enrichment reliability --- internal/app/app.go | 6 +- internal/tools/capture.go | 102 +++++++------- internal/tools/enrichment_retry.go | 209 +++++++++++++++++++++++++++++ internal/tools/metadata_retry.go | 42 ++++++ 4 files changed, 306 insertions(+), 53 deletions(-) create mode 100644 internal/tools/enrichment_retry.go diff --git a/internal/app/app.go b/internal/app/app.go index bd15e6c..4ae9359 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -162,11 +162,11 @@ 
func routes(logger *slog.Logger, cfg *config.Config, info buildinfo.Info, db *st oauthEnabled := oauthRegistry != nil && tokenStore != nil authMiddleware := auth.Middleware(cfg.Auth, keyring, oauthRegistry, tokenStore, accessTracker, logger) filesTool := tools.NewFilesTool(db, activeProjects) - metadataRetryer := tools.NewMetadataRetryer(context.Background(), db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, logger) + enrichmentRetryer := tools.NewEnrichmentRetryer(context.Background(), db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, logger) backfillTool := tools.NewBackfillTool(db, provider, activeProjects, logger) toolSet := mcpserver.ToolSet{ - Capture: tools.NewCaptureTool(db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, metadataRetryer, backfillTool, logger), + Capture: tools.NewCaptureTool(db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, enrichmentRetryer, backfillTool, logger), Search: tools.NewSearchTool(db, provider, cfg.Search, activeProjects), List: tools.NewListTool(db, cfg.Search, activeProjects), Stats: tools.NewStatsTool(db), @@ -183,7 +183,7 @@ func routes(logger *slog.Logger, cfg *config.Config, info buildinfo.Info, db *st Files: filesTool, Backfill: backfillTool, Reparse: tools.NewReparseMetadataTool(db, provider, cfg.Capture, activeProjects, logger), - RetryMetadata: tools.NewRetryMetadataTool(metadataRetryer), + RetryMetadata: tools.NewRetryEnrichmentTool(enrichmentRetryer), Maintenance: tools.NewMaintenanceTool(db), Skills: tools.NewSkillsTool(db, activeProjects), ChatHistory: tools.NewChatHistoryTool(db, activeProjects), diff --git a/internal/tools/capture.go b/internal/tools/capture.go index 7483ec0..eb33ea8 100644 --- a/internal/tools/capture.go +++ b/internal/tools/capture.go @@ -8,7 +8,6 @@ import ( "github.com/google/uuid" "github.com/modelcontextprotocol/go-sdk/mcp" - "golang.org/x/sync/errgroup" "git.warky.dev/wdevs/amcs/internal/ai" 
"git.warky.dev/wdevs/amcs/internal/config" @@ -58,52 +57,10 @@ func (t *CaptureTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in C return nil, CaptureOutput{}, err } - var embedding []float32 rawMetadata := metadata.Fallback(t.capture) - metadataNeedsRetry := false - embeddingNeedsRetry := false - - group, groupCtx := errgroup.WithContext(ctx) - group.Go(func() error { - vector, err := t.provider.Embed(groupCtx, content) - if err != nil { - t.log.Warn("embedding failed, thought will be saved without embedding", - slog.String("provider", t.provider.Name()), - slog.String("error", err.Error()), - ) - embeddingNeedsRetry = true - return nil - } - embedding = vector - return nil - }) - group.Go(func() error { - metaCtx := groupCtx - attemptedAt := time.Now().UTC() - if t.metadataTimeout > 0 { - var cancel context.CancelFunc - metaCtx, cancel = context.WithTimeout(groupCtx, t.metadataTimeout) - defer cancel() - } - extracted, err := t.provider.ExtractMetadata(metaCtx, content) - if err != nil { - t.log.Warn("metadata extraction failed, using fallback", slog.String("provider", t.provider.Name()), slog.String("error", err.Error())) - rawMetadata = metadata.MarkMetadataPending(rawMetadata, t.capture, attemptedAt, err) - metadataNeedsRetry = true - return nil - } - rawMetadata = metadata.MarkMetadataComplete(extracted, t.capture, attemptedAt) - return nil - }) - - if err := group.Wait(); err != nil { - return nil, CaptureOutput{}, err - } - thought := thoughttypes.Thought{ - Content: content, - Embedding: embedding, - Metadata: metadata.Normalize(metadata.SanitizeExtracted(rawMetadata), t.capture), + Content: content, + Metadata: rawMetadata, } if project != nil { thought.ProjectID = &project.ID @@ -116,12 +73,57 @@ func (t *CaptureTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in C if project != nil { _ = t.store.TouchProject(ctx, project.ID) } - if metadataNeedsRetry && t.retryer != nil { - t.retryer.QueueThought(created.ID) - } - if 
embeddingNeedsRetry && t.embedRetryer != nil { - t.embedRetryer.QueueThought(ctx, created.ID, content) + + if t.retryer != nil || t.embedRetryer != nil { + t.launchEnrichment(created.ID, content) } return nil, CaptureOutput{Thought: created}, nil } + +func (t *CaptureTool) launchEnrichment(id uuid.UUID, content string) { + go func() { + ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute) + defer cancel() + + if t.retryer != nil { + attemptedAt := time.Now().UTC() + rawMetadata := metadata.Fallback(t.capture) + extracted, err := t.provider.ExtractMetadata(ctx, content) + if err != nil { + failed := metadata.MarkMetadataFailed(rawMetadata, t.capture, attemptedAt, err) + if _, updateErr := t.store.UpdateThoughtMetadata(ctx, id, failed); updateErr != nil { + t.log.Warn("deferred metadata failure could not be persisted", + slog.String("thought_id", id.String()), + slog.String("error", updateErr.Error()), + ) + } + t.log.Warn("deferred metadata extraction failed", + slog.String("thought_id", id.String()), + slog.String("provider", t.provider.Name()), + slog.String("error", err.Error()), + ) + t.retryer.QueueThought(id) + } else { + completed := metadata.MarkMetadataComplete(extracted, t.capture, attemptedAt) + if _, updateErr := t.store.UpdateThoughtMetadata(ctx, id, completed); updateErr != nil { + t.log.Warn("deferred metadata completion could not be persisted", + slog.String("thought_id", id.String()), + slog.String("error", updateErr.Error()), + ) + } + } + } + + if t.embedRetryer != nil { + if _, err := t.provider.Embed(ctx, content); err != nil { + t.log.Warn("deferred embedding failed", + slog.String("thought_id", id.String()), + slog.String("provider", t.provider.Name()), + slog.String("error", err.Error()), + ) + } + t.embedRetryer.QueueThought(ctx, id, content) + } + }() +} diff --git a/internal/tools/enrichment_retry.go b/internal/tools/enrichment_retry.go new file mode 100644 index 0000000..6a3d4d4 --- /dev/null +++ 
b/internal/tools/enrichment_retry.go @@ -0,0 +1,209 @@ +package tools + +import ( + "context" + "log/slog" + "sync" + "time" + + "github.com/google/uuid" + "github.com/modelcontextprotocol/go-sdk/mcp" + "golang.org/x/sync/semaphore" + + "git.warky.dev/wdevs/amcs/internal/ai" + "git.warky.dev/wdevs/amcs/internal/config" + "git.warky.dev/wdevs/amcs/internal/metadata" + "git.warky.dev/wdevs/amcs/internal/session" + "git.warky.dev/wdevs/amcs/internal/store" + thoughttypes "git.warky.dev/wdevs/amcs/internal/types" +) + +const enrichmentRetryConcurrency = 4 +const enrichmentRetryMaxAttempts = 5 + +var enrichmentRetryBackoff = []time.Duration{ + 30 * time.Second, + 2 * time.Minute, + 10 * time.Minute, + 30 * time.Minute, + 2 * time.Hour, +} + +type EnrichmentRetryer struct { + backgroundCtx context.Context + store *store.DB + provider ai.Provider + capture config.CaptureConfig + sessions *session.ActiveProjects + metadataTimeout time.Duration + logger *slog.Logger +} + +type RetryEnrichmentTool struct { + retryer *EnrichmentRetryer +} + +type RetryEnrichmentInput struct { + Project string `json:"project,omitempty" jsonschema:"optional project name or id to scope the retry"` + Limit int `json:"limit,omitempty" jsonschema:"maximum number of thoughts to process in one call; defaults to 100"` + IncludeArchived bool `json:"include_archived,omitempty" jsonschema:"whether to include archived thoughts; defaults to false"` + OlderThanDays int `json:"older_than_days,omitempty" jsonschema:"only retry thoughts whose last metadata attempt was at least N days ago; 0 means no restriction"` + DryRun bool `json:"dry_run,omitempty" jsonschema:"report counts without retrying metadata extraction"` +} + +type RetryEnrichmentFailure struct { + ID string `json:"id"` + Error string `json:"error"` +} + +type RetryEnrichmentOutput struct { + Scanned int `json:"scanned"` + Retried int `json:"retried"` + Updated int `json:"updated"` + Skipped int `json:"skipped"` + Failed int `json:"failed"` + 
DryRun bool `json:"dry_run"` + Failures []RetryEnrichmentFailure `json:"failures,omitempty"` +} + +func NewEnrichmentRetryer(backgroundCtx context.Context, db *store.DB, provider ai.Provider, capture config.CaptureConfig, metadataTimeout time.Duration, sessions *session.ActiveProjects, logger *slog.Logger) *EnrichmentRetryer { + if backgroundCtx == nil { + backgroundCtx = context.Background() + } + return &EnrichmentRetryer{ + backgroundCtx: backgroundCtx, + store: db, + provider: provider, + capture: capture, + sessions: sessions, + metadataTimeout: metadataTimeout, + logger: logger, + } +} + +func NewRetryEnrichmentTool(retryer *EnrichmentRetryer) *RetryEnrichmentTool { + return &RetryEnrichmentTool{retryer: retryer} +} + +func (t *RetryEnrichmentTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in RetryEnrichmentInput) (*mcp.CallToolResult, RetryEnrichmentOutput, error) { + return t.retryer.Handle(ctx, req, in) +} + +func (r *EnrichmentRetryer) QueueThought(id uuid.UUID) { + go func() { + if _, err := r.retryOne(r.backgroundCtx, id); err != nil { + r.logger.Warn("background metadata retry failed", + slog.String("thought_id", id.String()), + slog.String("error", err.Error()), + ) + } + }() +} + +func (r *EnrichmentRetryer) Handle(ctx context.Context, req *mcp.CallToolRequest, in RetryEnrichmentInput) (*mcp.CallToolResult, RetryEnrichmentOutput, error) { + limit := in.Limit + if limit <= 0 { + limit = 100 + } + + project, err := resolveProject(ctx, r.store, r.sessions, req, in.Project, false) + if err != nil { + return nil, RetryEnrichmentOutput{}, err + } + + var projectID *uuid.UUID + if project != nil { + projectID = &project.ID + } + + thoughts, err := r.store.ListThoughtsPendingMetadataRetry(ctx, limit, projectID, in.IncludeArchived, in.OlderThanDays) + if err != nil { + return nil, RetryEnrichmentOutput{}, err + } + + out := RetryEnrichmentOutput{Scanned: len(thoughts), DryRun: in.DryRun} + if in.DryRun || len(thoughts) == 0 { + return nil, out, 
nil + } + + sem := semaphore.NewWeighted(enrichmentRetryConcurrency) + var mu sync.Mutex + var wg sync.WaitGroup + + for _, thought := range thoughts { + if ctx.Err() != nil { + break + } + if err := sem.Acquire(ctx, 1); err != nil { + break + } + + wg.Add(1) + go func(thought thoughttypes.Thought) { + defer wg.Done() + defer sem.Release(1) + + mu.Lock() + out.Retried++ + mu.Unlock() + + updated, err := r.retryOne(ctx, thought.ID) + if err != nil { + mu.Lock() + out.Failures = append(out.Failures, RetryEnrichmentFailure{ID: thought.ID.String(), Error: err.Error()}) + mu.Unlock() + return + } + if updated { + mu.Lock() + out.Updated++ + mu.Unlock() + return + } + + mu.Lock() + out.Skipped++ + mu.Unlock() + }(thought) + } + + wg.Wait() + out.Failed = len(out.Failures) + + return nil, out, nil +} + +func (r *EnrichmentRetryer) retryOne(ctx context.Context, id uuid.UUID) (bool, error) { + thought, err := r.store.GetThought(ctx, id) + if err != nil { + return false, err + } + if thought.Metadata.MetadataStatus == metadata.MetadataStatusComplete { + return false, nil + } + + attemptCtx := ctx + if r.metadataTimeout > 0 { + var cancel context.CancelFunc + attemptCtx, cancel = context.WithTimeout(ctx, r.metadataTimeout) + defer cancel() + } + + attemptedAt := time.Now().UTC() + extracted, extractErr := r.provider.ExtractMetadata(attemptCtx, thought.Content) + if extractErr != nil { + failedMetadata := metadata.MarkMetadataFailed(thought.Metadata, r.capture, attemptedAt, extractErr) + if _, updateErr := r.store.UpdateThoughtMetadata(ctx, thought.ID, failedMetadata); updateErr != nil { + return false, updateErr + } + return false, extractErr + } + + completedMetadata := metadata.MarkMetadataComplete(metadata.SanitizeExtracted(extracted), r.capture, attemptedAt) + completedMetadata.Attachments = thought.Metadata.Attachments + if _, updateErr := r.store.UpdateThoughtMetadata(ctx, thought.ID, completedMetadata); updateErr != nil { + return false, updateErr + } + + return true, 
nil +} diff --git a/internal/tools/metadata_retry.go b/internal/tools/metadata_retry.go index c49e356..ceb2268 100644 --- a/internal/tools/metadata_retry.go +++ b/internal/tools/metadata_retry.go @@ -28,12 +28,42 @@ type MetadataRetryer struct { sessions *session.ActiveProjects metadataTimeout time.Duration logger *slog.Logger + lock *RetryLocker } type RetryMetadataTool struct { retryer *MetadataRetryer } +type RetryLocker struct { + mu sync.Mutex + locks map[uuid.UUID]time.Time +} + +func NewRetryLocker() *RetryLocker { + return &RetryLocker{locks: map[uuid.UUID]time.Time{}} +} + +func (l *RetryLocker) Acquire(id uuid.UUID, ttl time.Duration) bool { + l.mu.Lock() + defer l.mu.Unlock() + if l.locks == nil { + l.locks = map[uuid.UUID]time.Time{} + } + now := time.Now() + if exp, ok := l.locks[id]; ok && exp.After(now) { + return false + } + l.locks[id] = now.Add(ttl) + return true +} + +func (l *RetryLocker) Release(id uuid.UUID) { + l.mu.Lock() + defer l.mu.Unlock() + delete(l.locks, id) +} + type RetryMetadataInput struct { Project string `json:"project,omitempty" jsonschema:"optional project name or id to scope the retry"` Limit int `json:"limit,omitempty" jsonschema:"maximum number of thoughts to process in one call; defaults to 100"` @@ -69,6 +99,7 @@ func NewMetadataRetryer(backgroundCtx context.Context, db *store.DB, provider ai sessions: sessions, metadataTimeout: metadataTimeout, logger: logger, + lock: NewRetryLocker(), } } @@ -82,6 +113,10 @@ func (t *RetryMetadataTool) Handle(ctx context.Context, req *mcp.CallToolRequest func (r *MetadataRetryer) QueueThought(id uuid.UUID) { go func() { + if !r.lock.Acquire(id, 15*time.Minute) { + return + } + defer r.lock.Release(id) if _, err := r.retryOne(r.backgroundCtx, id); err != nil { r.logger.Warn("background metadata retry failed", slog.String("thought_id", id.String()), slog.String("error", err.Error())) } @@ -138,7 +173,14 @@ func (r *MetadataRetryer) Handle(ctx context.Context, req *mcp.CallToolRequest, 
out.Retried++ mu.Unlock() + if !r.lock.Acquire(thought.ID, 15*time.Minute) { + mu.Lock() + out.Skipped++ + mu.Unlock() + return + } updated, err := r.retryOne(ctx, thought.ID) + r.lock.Release(thought.ID) if err != nil { mu.Lock() out.Failures = append(out.Failures, RetryMetadataFailure{ID: thought.ID.String(), Error: err.Error()}) From 894fa3fc1d95661c645cefac0f30b446cad25122 Mon Sep 17 00:00:00 2001 From: sgcommand Date: Tue, 21 Apr 2026 08:31:42 +0200 Subject: [PATCH 03/15] fix: include project names in thought text search --- internal/store/thoughts.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/internal/store/thoughts.go b/internal/store/thoughts.go index 1819a2e..56188b4 100644 --- a/internal/store/thoughts.go +++ b/internal/store/thoughts.go @@ -582,7 +582,7 @@ func (db *DB) SearchThoughtsText(ctx context.Context, query string, limit int, p args := []any{query} conditions := []string{ "t.archived_at is null", - "to_tsvector('simple', t.content) @@ websearch_to_tsquery('simple', $1)", + "(to_tsvector('simple', t.content) || to_tsvector('simple', coalesce(p.name, ''))) @@ websearch_to_tsquery('simple', $1)", } if projectID != nil { args = append(args, *projectID) @@ -596,9 +596,10 @@ func (db *DB) SearchThoughtsText(ctx context.Context, query string, limit int, p q := ` select t.guid, t.content, t.metadata, - ts_rank_cd(to_tsvector('simple', t.content), websearch_to_tsquery('simple', $1)) as similarity, + ts_rank_cd(to_tsvector('simple', t.content) || to_tsvector('simple', coalesce(p.name, '')), websearch_to_tsquery('simple', $1)) as similarity, t.created_at from thoughts t + left join projects p on t.project_id = p.guid where ` + strings.Join(conditions, " and ") + ` order by similarity desc limit $` + fmt.Sprintf("%d", len(args)) From 14e218d78405b9d9f32bb2dbb6b7304dbc2836bc Mon Sep 17 00:00:00 2001 From: Hein Date: Tue, 21 Apr 2026 21:14:28 +0200 Subject: [PATCH 04/15] test(config): add migration tests for litellm provider * 
Implement tests for migrating configuration from v1 to v2 for the litellm provider. * Validate the structure and values of the migrated configuration. * Ensure migration rejects newer versions of the configuration. fix(validate): enhance AI provider validation logic * Consolidate provider validation into a dedicated method. * Ensure at least one provider is specified and validate its type. * Check for required fields based on provider type. fix(mcpserver): update tool set to use new enrichment tool * Replace RetryMetadataTool with RetryEnrichmentTool in the ToolSet. fix(tools): refactor tools to use embedding and metadata runners * Update tools to utilize EmbeddingRunner and MetadataRunner instead of Provider. * Adjust method calls to align with the new runner interfaces. --- .gitignore | 1 + README.md | 48 +- changelog.md | 80 ++++ cmd/amcs-migrate-config/main.go | 105 +++++ configs/config.example.yaml | 73 ++- internal/ai/compat/client.go | 436 +++++------------- internal/ai/compat/client_test.go | 138 ++---- internal/ai/factory.go | 25 - internal/ai/factory_test.go | 33 -- internal/ai/litellm/client.go | 30 -- internal/ai/ollama/client.go | 26 -- internal/ai/openrouter/client.go | 37 -- internal/ai/provider.go | 15 - internal/ai/registry.go | 96 ++++ internal/ai/registry_test.go | 80 ++++ internal/ai/runner.go | 367 +++++++++++++++ internal/ai/runner_test.go | 139 ++++++ internal/app/app.go | 75 ++- internal/config/config.go | 144 +++--- internal/config/loader.go | 111 ++++- internal/config/loader_test.go | 125 ++++- internal/config/migrate.go | 341 ++++++++++++++ internal/config/migrate_test.go | 77 ++++ internal/config/validate.go | 92 ++-- internal/config/validate_test.go | 74 +-- internal/mcpserver/server.go | 2 +- .../mcpserver/streamable_integration_test.go | 2 +- internal/tools/backfill.go | 34 +- internal/tools/capture.go | 25 +- internal/tools/context.go | 14 +- internal/tools/enrichment_retry.go | 8 +- internal/tools/links.go | 12 +- 
internal/tools/metadata_retry.go | 8 +- internal/tools/recall.go | 14 +- internal/tools/reparse_metadata.go | 8 +- internal/tools/retrieval.go | 15 +- internal/tools/search.go | 14 +- internal/tools/summarize.go | 17 +- internal/tools/update.go | 22 +- 39 files changed, 2062 insertions(+), 901 deletions(-) create mode 100644 changelog.md create mode 100644 cmd/amcs-migrate-config/main.go delete mode 100644 internal/ai/factory.go delete mode 100644 internal/ai/factory_test.go delete mode 100644 internal/ai/litellm/client.go delete mode 100644 internal/ai/ollama/client.go delete mode 100644 internal/ai/openrouter/client.go delete mode 100644 internal/ai/provider.go create mode 100644 internal/ai/registry.go create mode 100644 internal/ai/registry_test.go create mode 100644 internal/ai/runner.go create mode 100644 internal/ai/runner_test.go create mode 100644 internal/config/migrate.go create mode 100644 internal/config/migrate_test.go diff --git a/.gitignore b/.gitignore index e1f4b2a..11d4489 100644 --- a/.gitignore +++ b/.gitignore @@ -34,3 +34,4 @@ OB1/ ui/node_modules/ ui/.svelte-kit/ internal/app/ui/dist/ +.codex diff --git a/README.md b/README.md index 75d2ba9..2f626c8 100644 --- a/README.md +++ b/README.md @@ -244,12 +244,24 @@ Link existing skills and guardrails to a project so they are automatically avail Config is YAML-driven. 
Copy `configs/config.example.yaml` and set: - `database.url` — Postgres connection string -- `auth.mode` — `api_keys` or `oauth_client_credentials` -- `auth.keys` — API keys for MCP access via `x-brain-key` or `Authorization: Bearer ` when `auth.mode=api_keys` -- `auth.oauth.clients` — client registry when `auth.mode=oauth_client_credentials` +- `auth.keys` — static API keys for MCP access via `x-brain-key` or `Authorization: Bearer ` +- `auth.oauth.clients` — optional OAuth client credentials registry +- `ai.providers` — named provider definitions (`litellm`, `ollama`, `openrouter`) +- `ai.embeddings.primary` / `ai.metadata.primary` — primary role targets (`provider` + `model`) +- `ai.embeddings.fallbacks` / `ai.metadata.fallbacks` — sequential fallback targets - `mcp.version` is build-generated and should not be set in config -**OAuth Client Credentials flow** (`auth.mode=oauth_client_credentials`): +Config schema is versioned. Current schema version is `2`. + +Use the migration helper to rewrite legacy configs in-place: + +```bash +go run ./cmd/amcs-migrate-config --config ./configs/dev.yaml +``` + +Use `--dry-run` to print migrated YAML without writing. + +**OAuth Client Credentials flow**: 1. Obtain a token — `POST /oauth/token` (public, no auth required): ``` @@ -267,8 +279,9 @@ Config is YAML-driven. Copy `configs/config.example.yaml` and set: ``` Alternatively, pass `client_id` and `client_secret` as body parameters instead of `Authorization: Basic`. Direct `Authorization: Basic` credential validation on the MCP endpoint is also supported as a fallback (no token required). 
-- `ai.litellm.base_url` and `ai.litellm.api_key` — LiteLLM proxy -- `ai.ollama.base_url` and `ai.ollama.api_key` — Ollama local or remote server +- `AMCS_LITELLM_BASE_URL` / `AMCS_LITELLM_API_KEY` override all configured LiteLLM providers +- `AMCS_OLLAMA_BASE_URL` / `AMCS_OLLAMA_API_KEY` override all configured Ollama providers +- `AMCS_OPENROUTER_API_KEY` overrides all configured OpenRouter providers See `llm/plan.md` for an audited high-level status summary of the original implementation plan, and `llm/todo.md` for the audited backfill/fallback follow-up status. @@ -643,27 +656,32 @@ Notes: ## Ollama -Set `ai.provider: "ollama"` to use a local or self-hosted Ollama server through its OpenAI-compatible API. +Set your role targets to an Ollama provider to use a local or self-hosted Ollama server through its OpenAI-compatible API. Example: ```yaml ai: - provider: "ollama" + providers: + local: + type: "ollama" + base_url: "http://localhost:11434/v1" + api_key: "ollama" + request_headers: {} embeddings: - model: "nomic-embed-text" dimensions: 768 + primary: + provider: "local" + model: "nomic-embed-text" metadata: - model: "llama3.2" temperature: 0.1 - ollama: - base_url: "http://localhost:11434/v1" - api_key: "ollama" - request_headers: {} + primary: + provider: "local" + model: "llama3.2" ``` Notes: -- For remote Ollama servers, point `ai.ollama.base_url` at the remote `/v1` endpoint. +- For remote Ollama servers, point `ai.providers..base_url` at the remote `/v1` endpoint. - The client always sends Bearer auth; Ollama ignores it locally, so `api_key: "ollama"` is a safe default. - `ai.embeddings.dimensions` must match the embedding model you actually use, or startup will fail the database vector-dimension check. 
diff --git a/changelog.md b/changelog.md new file mode 100644 index 0000000..5e64c3a --- /dev/null +++ b/changelog.md @@ -0,0 +1,80 @@ +# Changelog + +## 2026-04-21 + +### 2026-04-21 21h - Config Schema v2 Introduced + +- Refactored configuration to schema version `2` with named AI providers and role-based model chains. +- Added support for per-role primary and fallback targets for embeddings and metadata. +- Added optional background role overrides for backfill and metadata retry workers. + +### 2026-04-21 21h - Automatic v1 -> v2 Migration + +- Added config migration framework with explicit schema versioning. +- Implemented `v1 -> v2` migration to transform legacy provider blocks into named providers + role chains. +- Loader now auto-migrates older config files, rewrites migrated YAML, and creates timestamped backups. + +### 2026-04-21 21h - AI Registry and Role Runners + +- Added `ai.Registry` to build provider clients from named provider config entries. +- Added `EmbeddingRunner` and `MetadataRunner` with sequential fallback execution. +- Added target health tracking with cooldowns for transient/permanent/empty-response failures. + +### 2026-04-21 21h - App and Tool Wiring Updates + +- Rewired app startup to use provider registry + role runners for foreground and background flows. +- Updated capture, search, summarize, context, recall, backfill, metadata retry, and reparse paths to use new runners. +- Preserved environment override behavior for provider credentials/endpoints across matching provider types. + +### 2026-04-21 21h - Migrate Config CLI Added + +- Added `cmd/amcs-migrate-config` CLI to migrate config files to the current schema version. +- Supports dry-run output and in-place write mode with automatic backup file creation. + +### 2026-04-21 21h - Tests and Documentation Updated + +- Added focused tests for config migration, AI registry behavior, and runner fallback behavior. +- Updated `configs/config.example.yaml` to the new v2 schema. 
+- Updated README configuration sections and migration guidance to reflect v2 and `amcs-migrate-config` usage. + +### 2026-04-21 21h - Uncommitted File Change List + +- Modified: `.gitignore` +- Modified: `README.md` +- Modified: `configs/config.example.yaml` +- Modified: `internal/ai/compat/client.go` +- Modified: `internal/ai/compat/client_test.go` +- Modified: `internal/app/app.go` +- Modified: `internal/config/config.go` +- Modified: `internal/config/loader.go` +- Modified: `internal/config/loader_test.go` +- Modified: `internal/config/validate.go` +- Modified: `internal/config/validate_test.go` +- Modified: `internal/mcpserver/server.go` +- Modified: `internal/mcpserver/streamable_integration_test.go` +- Modified: `internal/tools/backfill.go` +- Modified: `internal/tools/capture.go` +- Modified: `internal/tools/context.go` +- Modified: `internal/tools/enrichment_retry.go` +- Modified: `internal/tools/links.go` +- Modified: `internal/tools/metadata_retry.go` +- Modified: `internal/tools/recall.go` +- Modified: `internal/tools/reparse_metadata.go` +- Modified: `internal/tools/retrieval.go` +- Modified: `internal/tools/search.go` +- Modified: `internal/tools/summarize.go` +- Modified: `internal/tools/update.go` +- Deleted: `internal/ai/factory.go` +- Deleted: `internal/ai/factory_test.go` +- Deleted: `internal/ai/litellm/client.go` +- Deleted: `internal/ai/ollama/client.go` +- Deleted: `internal/ai/openrouter/client.go` +- Deleted: `internal/ai/provider.go` +- New: `changelog.md` +- New: `cmd/amcs-migrate-config/main.go` +- New: `internal/ai/registry.go` +- New: `internal/ai/registry_test.go` +- New: `internal/ai/runner.go` +- New: `internal/ai/runner_test.go` +- New: `internal/config/migrate.go` +- New: `internal/config/migrate_test.go` diff --git a/cmd/amcs-migrate-config/main.go b/cmd/amcs-migrate-config/main.go new file mode 100644 index 0000000..53d7b66 --- /dev/null +++ b/cmd/amcs-migrate-config/main.go @@ -0,0 +1,105 @@ +package main + +import ( + "flag" + 
"fmt" + "log" + "os" + "time" + + "gopkg.in/yaml.v3" + + "git.warky.dev/wdevs/amcs/internal/config" +) + +func main() { + var ( + configPath string + dryRun bool + toVersion int + ) + flag.StringVar(&configPath, "config", "", "Path to the YAML config file (default: $AMCS_CONFIG or ./configs/dev.yaml)") + flag.BoolVar(&dryRun, "dry-run", false, "Print the migrated config to stdout instead of writing it back") + flag.IntVar(&toVersion, "to-version", config.CurrentConfigVersion, "Stop migrating after reaching this version") + flag.Parse() + + if toVersion <= 0 || toVersion > config.CurrentConfigVersion { + log.Fatalf("invalid -to-version %d (must be between 1 and %d)", toVersion, config.CurrentConfigVersion) + } + + path := config.ResolvePath(configPath) + original, err := os.ReadFile(path) + if err != nil { + log.Fatalf("read config %q: %v", path, err) + } + + raw := map[string]any{} + if err := yaml.Unmarshal(original, &raw); err != nil { + log.Fatalf("decode config %q: %v", path, err) + } + if raw == nil { + raw = map[string]any{} + } + + applied, err := migrateUpTo(raw, toVersion) + if err != nil { + log.Fatalf("migrate: %v", err) + } + + if len(applied) == 0 { + fmt.Fprintf(os.Stderr, "%s already at version %d; nothing to do\n", path, currentVersion(raw)) + return + } + + out, err := yaml.Marshal(raw) + if err != nil { + log.Fatalf("marshal migrated config: %v", err) + } + + for _, step := range applied { + fmt.Fprintf(os.Stderr, "applied migration v%d -> v%d: %s\n", step.From, step.To, step.Describe) + } + + if dryRun { + _, _ = os.Stdout.Write(out) + return + } + + backup := fmt.Sprintf("%s.bak.%d", path, time.Now().Unix()) + if err := os.WriteFile(backup, original, 0o600); err != nil { + log.Fatalf("write backup %q: %v", backup, err) + } + if err := os.WriteFile(path, out, 0o600); err != nil { + log.Fatalf("write migrated config %q: %v", path, err) + } + fmt.Fprintf(os.Stderr, "wrote migrated config to %s (backup: %s)\n", path, backup) +} + +// migrateUpTo 
runs the migration ladder but stops at the requested version. +func migrateUpTo(raw map[string]any, target int) ([]config.ConfigMigration, error) { + if currentVersion(raw) >= target { + return nil, nil + } + if target == config.CurrentConfigVersion { + return config.Migrate(raw) + } + // Partial migrations are rare; for now reject anything other than the + // current version target since the migration ladder is short. + return nil, fmt.Errorf("partial migration to v%d is not supported (use -to-version=%d)", target, config.CurrentConfigVersion) +} + +func currentVersion(raw map[string]any) int { + v, ok := raw["version"] + if !ok { + return 1 + } + switch n := v.(type) { + case int: + return n + case int64: + return int(n) + case float64: + return int(n) + } + return 1 +} diff --git a/configs/config.example.yaml b/configs/config.example.yaml index abd34dc..d13fb5d 100644 --- a/configs/config.example.yaml +++ b/configs/config.example.yaml @@ -1,3 +1,5 @@ +version: 2 + server: host: "0.0.0.0" port: 8080 @@ -27,7 +29,7 @@ auth: - id: "oauth-client" client_id: "" client_secret: "" - description: "used when auth.mode=oauth_client_credentials" + description: "optional OAuth client credentials" database: url: "postgres://postgres:postgres@localhost:5432/amcs?sslmode=disable" @@ -37,33 +39,58 @@ database: max_conn_idle_time: "10m" ai: - provider: "litellm" + providers: + default: + type: "litellm" + base_url: "http://localhost:4000/v1" + api_key: "replace-me" + request_headers: {} + + ollama_local: + type: "ollama" + base_url: "http://localhost:11434/v1" + api_key: "ollama" + request_headers: {} + + openrouter: + type: "openrouter" + base_url: "https://openrouter.ai/api/v1" + api_key: "replace-me" + app_name: "amcs" + site_url: "" + request_headers: {} + embeddings: - model: "openai/text-embedding-3-small" dimensions: 1536 + primary: + provider: "default" + model: "openai/text-embedding-3-small" + fallbacks: + - provider: "ollama_local" + model: "nomic-embed-text" + 
metadata: - model: "gpt-4o-mini" - fallback_models: [] temperature: 0.1 log_conversations: false - litellm: - base_url: "http://localhost:4000/v1" - api_key: "replace-me" - use_responses_api: false - request_headers: {} - embedding_model: "openrouter/openai/text-embedding-3-small" - metadata_model: "gpt-4o-mini" - fallback_metadata_models: [] - ollama: - base_url: "http://localhost:11434/v1" - api_key: "ollama" - request_headers: {} - openrouter: - base_url: "https://openrouter.ai/api/v1" - api_key: "" - app_name: "amcs" - site_url: "" - extra_headers: {} + timeout: "10s" + primary: + provider: "default" + model: "gpt-4o-mini" + fallbacks: + - provider: "openrouter" + model: "openai/gpt-4.1-mini" + + # Optional overrides for background jobs (backfill_embeddings, + # retry_failed_metadata, reparse_thought_metadata). + background: + embeddings: + primary: + provider: "default" + model: "openai/text-embedding-3-small" + metadata: + primary: + provider: "default" + model: "gpt-4o-mini" capture: source: "mcp" diff --git a/internal/ai/compat/client.go b/internal/ai/compat/client.go index 37c55f6..a8b129b 100644 --- a/internal/ai/compat/client.go +++ b/internal/ai/compat/client.go @@ -14,7 +14,6 @@ import ( "regexp" "slices" "strings" - "sync" "time" thoughttypes "git.warky.dev/wdevs/amcs/internal/types" @@ -36,36 +35,39 @@ Rules: - If unsure, prefer "observation". - Do not include any text outside the JSON object.` +// Client is a low-level OpenAI-compatible HTTP client. It knows nothing about +// role chains, fallbacks, or health — those concerns belong to ai.Runner. Each +// method takes the model name per-call so a single Client instance can service +// many different models on the same base URL. 
type Client struct { - name string - baseURL string - apiKey string - embeddingModel string - metadataModel string - fallbackMetadataModels []string - temperature float64 - headers map[string]string - httpClient *http.Client - log *slog.Logger - dimensions int - logConversations bool - modelHealthMu sync.Mutex - modelHealth map[string]modelHealthState + name string + baseURL string + apiKey string + headers map[string]string + httpClient *http.Client + log *slog.Logger } type Config struct { - Name string - BaseURL string - APIKey string - EmbeddingModel string - MetadataModel string - FallbackMetadataModels []string - Temperature float64 - Headers map[string]string - HTTPClient *http.Client - Log *slog.Logger - Dimensions int - LogConversations bool + Name string + BaseURL string + APIKey string + Headers map[string]string + HTTPClient *http.Client + Log *slog.Logger +} + +// MetadataOptions control a single ExtractMetadataWith call. +type MetadataOptions struct { + Model string + Temperature float64 + LogConversations bool +} + +// SummarizeOptions control a single SummarizeWith call. +type SummarizeOptions struct { + Model string + Temperature float64 } type embeddingsRequest struct { @@ -127,65 +129,38 @@ type providerError struct { const maxMetadataAttempts = 3 -const ( - emptyResponseCircuitThreshold = 3 - emptyResponseCircuitTTL = 5 * time.Minute - permanentModelFailureTTL = 24 * time.Hour -) - +// ErrEmptyResponse and ErrNoJSONObject are sentinel errors callers can inspect +// to classify metadata failures (e.g. bump empty-response health counters). 
var ( - errMetadataEmptyResponse = errors.New("metadata empty response") - errMetadataNoJSONObject = errors.New("metadata response contains no JSON object") + ErrEmptyResponse = errors.New("metadata empty response") + ErrNoJSONObject = errors.New("metadata response contains no JSON object") ) -type modelHealthState struct { - consecutiveEmpty int - unhealthyUntil time.Time -} - func New(cfg Config) *Client { - fallbacks := make([]string, 0, len(cfg.FallbackMetadataModels)) - seen := make(map[string]struct{}, len(cfg.FallbackMetadataModels)) - for _, model := range cfg.FallbackMetadataModels { - model = strings.TrimSpace(model) - if model == "" { - continue - } - if _, ok := seen[model]; ok { - continue - } - seen[model] = struct{}{} - fallbacks = append(fallbacks, model) - } - return &Client{ - name: cfg.Name, - baseURL: cfg.BaseURL, - apiKey: cfg.APIKey, - embeddingModel: cfg.EmbeddingModel, - metadataModel: cfg.MetadataModel, - fallbackMetadataModels: fallbacks, - temperature: cfg.Temperature, - headers: cfg.Headers, - httpClient: cfg.HTTPClient, - log: cfg.Log, - dimensions: cfg.Dimensions, - logConversations: cfg.LogConversations, - modelHealth: make(map[string]modelHealthState), + name: cfg.Name, + baseURL: cfg.BaseURL, + apiKey: cfg.APIKey, + headers: cfg.Headers, + httpClient: cfg.HTTPClient, + log: cfg.Log, } } -func (c *Client) Embed(ctx context.Context, input string) ([]float32, error) { +func (c *Client) Name() string { return c.name } + +// EmbedWith generates an embedding for the given input using model. 
+func (c *Client) EmbedWith(ctx context.Context, model, input string) ([]float32, error) { input = strings.TrimSpace(input) if input == "" { return nil, fmt.Errorf("%s embed: input must not be empty", c.name) } + if strings.TrimSpace(model) == "" { + return nil, fmt.Errorf("%s embed: model is required", c.name) + } var resp embeddingsResponse - err := c.doJSON(ctx, "/embeddings", embeddingsRequest{ - Input: input, - Model: c.embeddingModel, - }, &resp) + err := c.doJSON(ctx, "/embeddings", embeddingsRequest{Input: input, Model: model}, &resp) if err != nil { return nil, err } @@ -195,141 +170,34 @@ func (c *Client) Embed(ctx context.Context, input string) ([]float32, error) { if len(resp.Data) == 0 { return nil, fmt.Errorf("%s embed: no embedding returned", c.name) } - if c.dimensions > 0 && len(resp.Data[0].Embedding) != c.dimensions { - return nil, fmt.Errorf("%s embed: expected %d dimensions, got %d", c.name, c.dimensions, len(resp.Data[0].Embedding)) - } - return resp.Data[0].Embedding, nil } -func (c *Client) ExtractMetadata(ctx context.Context, input string) (thoughttypes.ThoughtMetadata, error) { +// ExtractMetadataWith extracts structured metadata for input using opts.Model. +// Returns compat.ErrEmptyResponse / ErrNoJSONObject wrapped when the model +// produces unusable output so callers can classify the failure. 
+func (c *Client) ExtractMetadataWith(ctx context.Context, opts MetadataOptions, input string) (thoughttypes.ThoughtMetadata, error) { input = strings.TrimSpace(input) if input == "" { return thoughttypes.ThoughtMetadata{}, fmt.Errorf("%s extract metadata: input must not be empty", c.name) } - - start := time.Now() - if c.log != nil { - c.log.Info("metadata client started", - slog.String("provider", c.name), - slog.String("model", c.metadataModel), - ) - } - - logCompletion := func(model string, err error) { - if c.log == nil { - return - } - - attrs := []any{ - slog.String("provider", c.name), - slog.String("model", model), - slog.String("duration", formatLogDuration(time.Since(start))), - } - if err != nil { - attrs = append(attrs, slog.String("error", err.Error())) - c.log.Error("metadata client completed", attrs...) - return - } - - c.log.Info("metadata client completed", attrs...) - } - - result, err := c.extractMetadataWithModel(ctx, input, c.metadataModel) - if errors.Is(err, errMetadataEmptyResponse) { - c.noteEmptyResponse(c.metadataModel) - } - if isPermanentModelError(err) { - c.notePermanentModelFailure(c.metadataModel, err) - } - if err == nil { - c.noteModelSuccess(c.metadataModel) - logCompletion(c.metadataModel, nil) - return result, nil - } - - for _, fallbackModel := range c.fallbackMetadataModels { - if ctx.Err() != nil { - break - } - if fallbackModel == "" || fallbackModel == c.metadataModel { - continue - } - if c.shouldBypassModel(fallbackModel) { - continue - } - if c.log != nil { - c.log.Warn("metadata extraction failed, trying fallback model", - slog.String("provider", c.name), - slog.String("primary_model", c.metadataModel), - slog.String("fallback_model", fallbackModel), - slog.String("error", err.Error()), - ) - } - fallbackResult, fallbackErr := c.extractMetadataWithModel(ctx, input, fallbackModel) - if errors.Is(fallbackErr, errMetadataEmptyResponse) { - c.noteEmptyResponse(fallbackModel) - } - if isPermanentModelError(fallbackErr) { 
- c.notePermanentModelFailure(fallbackModel, fallbackErr) - } - if fallbackErr == nil { - c.noteModelSuccess(fallbackModel) - logCompletion(fallbackModel, nil) - return fallbackResult, nil - } - err = fallbackErr - } - - if ctx.Err() != nil { - err = fmt.Errorf("%s metadata: %w", c.name, ctx.Err()) - logCompletion(c.metadataModel, err) - return thoughttypes.ThoughtMetadata{}, err - } - - heuristic := heuristicMetadataFromInput(input) - if c.log != nil { - c.log.Warn("metadata extraction failed for all models, using heuristic fallback", - slog.String("provider", c.name), - slog.String("error", err.Error()), - ) - } - logCompletion(c.metadataModel, nil) - return heuristic, nil -} - -func formatLogDuration(d time.Duration) string { - if d < 0 { - d = -d - } - - totalMilliseconds := d.Milliseconds() - minutes := totalMilliseconds / 60000 - seconds := (totalMilliseconds / 1000) % 60 - milliseconds := totalMilliseconds % 1000 - return fmt.Sprintf("%02d:%02d:%03d", minutes, seconds, milliseconds) -} - -func (c *Client) extractMetadataWithModel(ctx context.Context, input, model string) (thoughttypes.ThoughtMetadata, error) { - if c.shouldBypassModel(model) { - return thoughttypes.ThoughtMetadata{}, fmt.Errorf("%s metadata: model %q temporarily bypassed after repeated empty responses", c.name, model) + if strings.TrimSpace(opts.Model) == "" { + return thoughttypes.ThoughtMetadata{}, fmt.Errorf("%s extract metadata: model is required", c.name) } stream := true req := chatCompletionsRequest{ - Model: model, - Temperature: c.temperature, - ResponseFormat: &responseType{ - Type: "json_object", - }, - Stream: &stream, + Model: opts.Model, + Temperature: opts.Temperature, + ResponseFormat: &responseType{Type: "json_object"}, + Stream: &stream, Messages: []chatMessage{ {Role: "system", Content: metadataSystemPrompt}, {Role: "user", Content: input}, }, } - metadata, err := c.extractMetadataWithRequest(ctx, req, input, model) + metadata, err := c.extractMetadataWithRequest(ctx, req, 
input, opts) if err == nil || !shouldRetryWithoutJSONMode(err) { return metadata, err } @@ -337,23 +205,22 @@ func (c *Client) extractMetadataWithModel(ctx context.Context, input, model stri if c.log != nil { c.log.Warn("metadata json mode failed, retrying without response_format", slog.String("provider", c.name), - slog.String("model", model), + slog.String("model", opts.Model), slog.String("error", err.Error()), ) } req.ResponseFormat = nil - return c.extractMetadataWithRequest(ctx, req, input, model) + return c.extractMetadataWithRequest(ctx, req, input, opts) } -func (c *Client) extractMetadataWithRequest(ctx context.Context, req chatCompletionsRequest, input, model string) (thoughttypes.ThoughtMetadata, error) { - +func (c *Client) extractMetadataWithRequest(ctx context.Context, req chatCompletionsRequest, input string, opts MetadataOptions) (thoughttypes.ThoughtMetadata, error) { var lastErr error for attempt := 1; attempt <= maxMetadataAttempts; attempt++ { - if c.logConversations && c.log != nil { + if opts.LogConversations && c.log != nil { c.log.Info("metadata conversation request", slog.String("provider", c.name), - slog.String("model", model), + slog.String("model", opts.Model), slog.Int("attempt", attempt), slog.String("system", metadataSystemPrompt), slog.String("input", input), @@ -373,10 +240,10 @@ func (c *Client) extractMetadataWithRequest(ctx context.Context, req chatComplet rawResponse := extractChoiceText(resp.Choices[0].Message, resp.Choices[0].Text) - if c.logConversations && c.log != nil { + if opts.LogConversations && c.log != nil { c.log.Info("metadata conversation response", slog.String("provider", c.name), - slog.String("model", model), + slog.String("model", opts.Model), slog.Int("attempt", attempt), slog.String("response", rawResponse), ) @@ -387,13 +254,13 @@ func (c *Client) extractMetadataWithRequest(ctx context.Context, req chatComplet metadataText = stripCodeFence(metadataText) metadataText = extractJSONObject(metadataText) if 
metadataText == "" { - lastErr = fmt.Errorf("%s metadata: %w", c.name, errMetadataNoJSONObject) + lastErr = fmt.Errorf("%s metadata: %w", c.name, ErrNoJSONObject) if strings.TrimSpace(rawResponse) == "" && attempt < maxMetadataAttempts && ctx.Err() == nil { - lastErr = fmt.Errorf("%s metadata: %w", c.name, errMetadataEmptyResponse) + lastErr = fmt.Errorf("%s metadata: %w", c.name, ErrEmptyResponse) if c.log != nil { c.log.Warn("metadata response empty, waiting and retrying", slog.String("provider", c.name), - slog.String("model", model), + slog.String("model", opts.Model), slog.Int("attempt", attempt+1), ) } @@ -403,7 +270,7 @@ func (c *Client) extractMetadataWithRequest(ctx context.Context, req chatComplet continue } if strings.TrimSpace(rawResponse) == "" { - lastErr = fmt.Errorf("%s metadata: %w", c.name, errMetadataEmptyResponse) + lastErr = fmt.Errorf("%s metadata: %w", c.name, ErrEmptyResponse) } return thoughttypes.ThoughtMetadata{}, lastErr } @@ -420,13 +287,17 @@ func (c *Client) extractMetadataWithRequest(ctx context.Context, req chatComplet if lastErr != nil { return thoughttypes.ThoughtMetadata{}, lastErr } - return thoughttypes.ThoughtMetadata{}, fmt.Errorf("%s metadata: %w", c.name, errMetadataNoJSONObject) + return thoughttypes.ThoughtMetadata{}, fmt.Errorf("%s metadata: %w", c.name, ErrNoJSONObject) } -func (c *Client) Summarize(ctx context.Context, systemPrompt, userPrompt string) (string, error) { +// SummarizeWith runs a chat-completion summarisation using opts.Model. 
+func (c *Client) SummarizeWith(ctx context.Context, opts SummarizeOptions, systemPrompt, userPrompt string) (string, error) { + if strings.TrimSpace(opts.Model) == "" { + return "", fmt.Errorf("%s summarize: model is required", c.name) + } req := chatCompletionsRequest{ - Model: c.metadataModel, - Temperature: 0.2, + Model: opts.Model, + Temperature: opts.Temperature, Messages: []chatMessage{ {Role: "system", Content: systemPrompt}, {Role: "user", Content: userPrompt}, @@ -447,12 +318,49 @@ func (c *Client) Summarize(ctx context.Context, systemPrompt, userPrompt string) return extractChoiceText(resp.Choices[0].Message, resp.Choices[0].Text), nil } -func (c *Client) Name() string { - return c.name +// IsPermanentModelError reports whether err indicates the model itself is +// invalid or missing (vs. a transient outage). Runners use this to mark a +// target unhealthy for longer. +func IsPermanentModelError(err error) bool { + if err == nil { + return false + } + lower := strings.ToLower(err.Error()) + for _, marker := range []string{ + "invalid model name", + "model_not_found", + "model not found", + "unknown model", + "no such model", + "does not exist", + } { + if strings.Contains(lower, marker) { + return true + } + } + return false } -func (c *Client) EmbeddingModel() string { - return c.embeddingModel +// HeuristicMetadataFromInput produces best-effort metadata from the note text +// when every model in the chain has failed. Exported so ai.Runner can use it. 
+func HeuristicMetadataFromInput(input string) thoughttypes.ThoughtMetadata { + text := strings.TrimSpace(input) + lower := strings.ToLower(text) + + metadata := thoughttypes.ThoughtMetadata{ + People: heuristicPeople(text), + ActionItems: heuristicActionItems(text), + DatesMentioned: heuristicDates(text), + Topics: heuristicTopics(lower), + Type: heuristicType(lower), + } + if len(metadata.Topics) == 0 { + metadata.Topics = []string{"uncategorized"} + } + if metadata.Type == "" { + metadata.Type = "observation" + } + return metadata } func (c *Client) doJSON(ctx context.Context, path string, requestBody any, dest any) error { @@ -724,8 +632,6 @@ func isRetryableChatResponseError(err error) bool { return strings.Contains(lower, "read response") || strings.Contains(lower, "read stream response") } -// extractJSONObject finds the first complete {...} block in s. -// It handles models that prepend prose to a JSON response despite json_object mode. func extractJSONObject(s string) string { for start := 0; start < len(s); start++ { if s[start] != '{' { @@ -768,10 +674,6 @@ func extractJSONObject(s string) string { return "" } -// stripThinkingBlocks removes ... and ... -// blocks produced by reasoning models (DeepSeek R1, QwQ, etc.) so that the -// remaining text can be parsed as JSON without interference from thinking content -// that may itself contain braces. func stripThinkingBlocks(s string) string { for _, tag := range []string{"think", "thinking"} { open := "<" + tag + ">" @@ -857,7 +759,6 @@ func extractTextFromAny(value any) string { } return strings.Join(parts, "\n") case map[string]any: - // Common provider shapes for chat content parts. 
for _, key := range []string{"text", "output_text", "content", "value"} { if nested, ok := typed[key]; ok { if text := strings.TrimSpace(extractTextFromAny(nested)); text != "" { @@ -875,28 +776,6 @@ var ( wordPattern = regexp.MustCompile(`[a-zA-Z][a-zA-Z0-9_/-]{2,}`) ) -func heuristicMetadataFromInput(input string) thoughttypes.ThoughtMetadata { - text := strings.TrimSpace(input) - lower := strings.ToLower(text) - - metadata := thoughttypes.ThoughtMetadata{ - People: heuristicPeople(text), - ActionItems: heuristicActionItems(text), - DatesMentioned: heuristicDates(text), - Topics: heuristicTopics(lower), - Type: heuristicType(lower), - Source: "", - } - - if len(metadata.Topics) == 0 { - metadata.Topics = []string{"uncategorized"} - } - if metadata.Type == "" { - metadata.Type = "observation" - } - return metadata -} - func heuristicType(lower string) string { switch { case strings.Contains(lower, "preferred name"), strings.Contains(lower, "personal profile"), strings.Contains(lower, "wife:"), strings.Contains(lower, "daughter:"), strings.Contains(lower, "born:"): @@ -1055,7 +934,7 @@ func shouldRetryWithoutJSONMode(err error) bool { if err == nil { return false } - if errors.Is(err, errMetadataEmptyResponse) || errors.Is(err, errMetadataNoJSONObject) { + if errors.Is(err, ErrEmptyResponse) || errors.Is(err, ErrNoJSONObject) { return true } @@ -1063,27 +942,6 @@ func shouldRetryWithoutJSONMode(err error) bool { return strings.Contains(lower, "parse json") } -func isPermanentModelError(err error) bool { - if err == nil { - return false - } - - lower := strings.ToLower(err.Error()) - for _, marker := range []string{ - "invalid model name", - "model_not_found", - "model not found", - "unknown model", - "no such model", - "does not exist", - } { - if strings.Contains(lower, marker) { - return true - } - } - return false -} - func sleepRetry(ctx context.Context, attempt int, log *slog.Logger, provider string) error { delay := time.Duration(attempt*attempt) * 200 * 
time.Millisecond if log != nil { @@ -1110,59 +968,3 @@ func sleepMetadataRetry(ctx context.Context, attempt int) error { return nil } } - -func (c *Client) shouldBypassModel(model string) bool { - c.modelHealthMu.Lock() - defer c.modelHealthMu.Unlock() - - state, ok := c.modelHealth[model] - if !ok { - return false - } - return !state.unhealthyUntil.IsZero() && time.Now().Before(state.unhealthyUntil) -} - -func (c *Client) noteEmptyResponse(model string) { - c.modelHealthMu.Lock() - defer c.modelHealthMu.Unlock() - - state := c.modelHealth[model] - state.consecutiveEmpty++ - if state.consecutiveEmpty >= emptyResponseCircuitThreshold { - state.unhealthyUntil = time.Now().Add(emptyResponseCircuitTTL) - if c.log != nil { - c.log.Warn("metadata model marked temporarily unhealthy after repeated empty responses", - slog.String("provider", c.name), - slog.String("model", model), - slog.Time("until", state.unhealthyUntil), - ) - } - } - c.modelHealth[model] = state -} - -func (c *Client) noteModelSuccess(model string) { - c.modelHealthMu.Lock() - defer c.modelHealthMu.Unlock() - - delete(c.modelHealth, model) -} - -func (c *Client) notePermanentModelFailure(model string, err error) { - c.modelHealthMu.Lock() - defer c.modelHealthMu.Unlock() - - state := c.modelHealth[model] - state.consecutiveEmpty = emptyResponseCircuitThreshold - state.unhealthyUntil = time.Now().Add(permanentModelFailureTTL) - c.modelHealth[model] = state - - if c.log != nil { - c.log.Warn("metadata model marked unhealthy after permanent failure", - slog.String("provider", c.name), - slog.String("model", model), - slog.String("error", err.Error()), - slog.Time("until", state.unhealthyUntil), - ) - } -} diff --git a/internal/ai/compat/client_test.go b/internal/ai/compat/client_test.go index 5d3c93a..9b627f6 100644 --- a/internal/ai/compat/client_test.go +++ b/internal/ai/compat/client_test.go @@ -11,6 +11,17 @@ import ( "testing" ) +func newTestClient(t *testing.T, url string) *Client { + t.Helper() + 
return New(Config{ + Name: "litellm", + BaseURL: url, + APIKey: "test-key", + HTTPClient: http.DefaultClient, + Log: slog.New(slog.NewTextHandler(io.Discard, nil)), + }) +} + func TestExtractMetadataFromStreamingResponse(t *testing.T) { t.Parallel() @@ -26,6 +37,9 @@ func TestExtractMetadataFromStreamingResponse(t *testing.T) { if req.Stream == nil || !*req.Stream { t.Fatalf("stream flag = %v, want true", req.Stream) } + if req.Model != "qwen3.5:latest" { + t.Fatalf("model = %q, want qwen3.5:latest", req.Model) + } w.Header().Set("Content-Type", "text/event-stream") _, _ = io.WriteString(w, "data: {\"choices\":[{\"delta\":{\"content\":\"{\\\"people\\\":[],\"}}]}\n\n") @@ -35,20 +49,13 @@ func TestExtractMetadataFromStreamingResponse(t *testing.T) { })) defer server.Close() - client := New(Config{ - Name: "litellm", - BaseURL: server.URL, - APIKey: "test-key", - MetadataModel: "qwen3.5:latest", - Temperature: 0.1, - HTTPClient: server.Client(), - Log: slog.New(slog.NewTextHandler(io.Discard, nil)), - EmbeddingModel: "unused", - }) - - metadata, err := client.ExtractMetadata(context.Background(), "Project idea: Build an Android companion app.") + client := newTestClient(t, server.URL) + metadata, err := client.ExtractMetadataWith(context.Background(), MetadataOptions{ + Model: "qwen3.5:latest", + Temperature: 0.1, + }, "Project idea: Build an Android companion app.") if err != nil { - t.Fatalf("ExtractMetadata() error = %v", err) + t.Fatalf("ExtractMetadataWith() error = %v", err) } if metadata.Type != "idea" { @@ -94,20 +101,13 @@ func TestExtractMetadataRetriesWithoutJSONMode(t *testing.T) { })) defer server.Close() - client := New(Config{ - Name: "litellm", - BaseURL: server.URL, - APIKey: "test-key", - MetadataModel: "qwen3.5:latest", - Temperature: 0.1, - HTTPClient: server.Client(), - Log: slog.New(slog.NewTextHandler(io.Discard, nil)), - EmbeddingModel: "unused", - }) - - metadata, err := client.ExtractMetadata(context.Background(), "Project idea: Build an 
Android companion app.") + client := newTestClient(t, server.URL) + metadata, err := client.ExtractMetadataWith(context.Background(), MetadataOptions{ + Model: "qwen3.5:latest", + Temperature: 0.1, + }, "Project idea: Build an Android companion app.") if err != nil { - t.Fatalf("ExtractMetadata() error = %v", err) + t.Fatalf("ExtractMetadataWith() error = %v", err) } if metadata.Type != "idea" { @@ -127,71 +127,33 @@ func TestExtractMetadataRetriesWithoutJSONMode(t *testing.T) { } } -func TestExtractMetadataBypassesInvalidFallbackModelAfterFirstFailure(t *testing.T) { +func TestIsPermanentModelError(t *testing.T) { t.Parallel() - var mu sync.Mutex - primaryCalls := 0 - invalidFallbackCalls := 0 - - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - defer func() { - _ = r.Body.Close() - }() - - var req chatCompletionsRequest - if err := json.NewDecoder(r.Body).Decode(&req); err != nil { - t.Fatalf("decode request: %v", err) - } - - switch req.Model { - case "empty-primary": - _, _ = io.WriteString(w, `{"choices":[{"message":{"role":"assistant","content":""}}]}`) - case "qwen3.5:latest": - mu.Lock() - primaryCalls++ - mu.Unlock() - _, _ = io.WriteString(w, `{"choices":[{"message":{"role":"assistant","content":"{\"people\":[],\"action_items\":[],\"dates_mentioned\":[],\"topics\":[\"metadata\"],\"type\":\"observation\",\"source\":\"primary\"}"}}]}`) - case "qwen3": - mu.Lock() - invalidFallbackCalls++ - mu.Unlock() - w.WriteHeader(http.StatusBadRequest) - _, _ = io.WriteString(w, "{\"error\":{\"message\":\"{'error': '/chat/completions: Invalid model name passed in model=qwen3. 
Call `/v1/models` to view available models for your key.'}\"}}") - default: - t.Fatalf("unexpected model %q", req.Model) - } - })) - defer server.Close() - - client := New(Config{ - Name: "litellm", - BaseURL: server.URL, - APIKey: "test-key", - MetadataModel: "empty-primary", - FallbackMetadataModels: []string{"qwen3", "qwen3.5:latest"}, - Temperature: 0.1, - HTTPClient: server.Client(), - Log: slog.New(slog.NewTextHandler(io.Discard, nil)), - EmbeddingModel: "unused", - }) - - for i := 0; i < 2; i++ { - metadata, err := client.ExtractMetadata(context.Background(), "A short note about metadata.") - if err != nil { - t.Fatalf("ExtractMetadata() error = %v", err) - } - if metadata.Source != "primary" { - t.Fatalf("metadata source = %q, want primary", metadata.Source) - } + cases := []struct { + name string + err error + want bool + }{ + {"nil", nil, false}, + {"invalid model", errMsg("Invalid model name passed in model=qwen3"), true}, + {"model not found", errMsg("model_not_found"), true}, + {"no such model", errMsg("no such model"), true}, + {"transient", errMsg("connection refused"), false}, } - mu.Lock() - defer mu.Unlock() - if invalidFallbackCalls != 1 { - t.Fatalf("invalid fallback calls = %d, want 1", invalidFallbackCalls) - } - if primaryCalls != 2 { - t.Fatalf("valid fallback calls = %d, want 2", primaryCalls) + for _, tc := range cases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + if got := IsPermanentModelError(tc.err); got != tc.want { + t.Fatalf("IsPermanentModelError(%v) = %v, want %v", tc.err, got, tc.want) + } + }) } } + +type stringError string + +func (s stringError) Error() string { return string(s) } + +func errMsg(s string) error { return stringError(s) } diff --git a/internal/ai/factory.go b/internal/ai/factory.go deleted file mode 100644 index b6ee360..0000000 --- a/internal/ai/factory.go +++ /dev/null @@ -1,25 +0,0 @@ -package ai - -import ( - "fmt" - "log/slog" - "net/http" - - "git.warky.dev/wdevs/amcs/internal/ai/litellm" - 
"git.warky.dev/wdevs/amcs/internal/ai/ollama" - "git.warky.dev/wdevs/amcs/internal/ai/openrouter" - "git.warky.dev/wdevs/amcs/internal/config" -) - -func NewProvider(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (Provider, error) { - switch cfg.Provider { - case "litellm": - return litellm.New(cfg, httpClient, log) - case "ollama": - return ollama.New(cfg, httpClient, log) - case "openrouter": - return openrouter.New(cfg, httpClient, log) - default: - return nil, fmt.Errorf("unsupported ai.provider: %s", cfg.Provider) - } -} diff --git a/internal/ai/factory_test.go b/internal/ai/factory_test.go deleted file mode 100644 index 02d2837..0000000 --- a/internal/ai/factory_test.go +++ /dev/null @@ -1,33 +0,0 @@ -package ai - -import ( - "io" - "log/slog" - "net/http" - "testing" - - "git.warky.dev/wdevs/amcs/internal/config" -) - -func TestNewProviderSupportsOllama(t *testing.T) { - provider, err := NewProvider(config.AIConfig{ - Provider: "ollama", - Embeddings: config.AIEmbeddingConfig{ - Model: "nomic-embed-text", - Dimensions: 768, - }, - Metadata: config.AIMetadataConfig{ - Model: "llama3.2", - }, - Ollama: config.OllamaConfig{ - BaseURL: "http://localhost:11434/v1", - APIKey: "ollama", - }, - }, &http.Client{}, slog.New(slog.NewTextHandler(io.Discard, nil))) - if err != nil { - t.Fatalf("NewProvider() error = %v", err) - } - if provider.Name() != "ollama" { - t.Fatalf("provider name = %q, want ollama", provider.Name()) - } -} diff --git a/internal/ai/litellm/client.go b/internal/ai/litellm/client.go deleted file mode 100644 index 3c9f1b0..0000000 --- a/internal/ai/litellm/client.go +++ /dev/null @@ -1,30 +0,0 @@ -package litellm - -import ( - "log/slog" - "net/http" - - "git.warky.dev/wdevs/amcs/internal/ai/compat" - "git.warky.dev/wdevs/amcs/internal/config" -) - -func New(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (*compat.Client, error) { - fallbacks := cfg.LiteLLM.EffectiveFallbackMetadataModels() - if len(fallbacks) == 0 
{ - fallbacks = cfg.Metadata.EffectiveFallbackModels() - } - return compat.New(compat.Config{ - Name: "litellm", - BaseURL: cfg.LiteLLM.BaseURL, - APIKey: cfg.LiteLLM.APIKey, - EmbeddingModel: cfg.LiteLLM.EmbeddingModel, - MetadataModel: cfg.LiteLLM.MetadataModel, - FallbackMetadataModels: fallbacks, - Temperature: cfg.Metadata.Temperature, - Headers: cfg.LiteLLM.RequestHeaders, - HTTPClient: httpClient, - Log: log, - Dimensions: cfg.Embeddings.Dimensions, - LogConversations: cfg.Metadata.LogConversations, - }), nil -} diff --git a/internal/ai/ollama/client.go b/internal/ai/ollama/client.go deleted file mode 100644 index 69abf8e..0000000 --- a/internal/ai/ollama/client.go +++ /dev/null @@ -1,26 +0,0 @@ -package ollama - -import ( - "log/slog" - "net/http" - - "git.warky.dev/wdevs/amcs/internal/ai/compat" - "git.warky.dev/wdevs/amcs/internal/config" -) - -func New(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (*compat.Client, error) { - return compat.New(compat.Config{ - Name: "ollama", - BaseURL: cfg.Ollama.BaseURL, - APIKey: cfg.Ollama.APIKey, - EmbeddingModel: cfg.Embeddings.Model, - MetadataModel: cfg.Metadata.Model, - FallbackMetadataModels: cfg.Metadata.EffectiveFallbackModels(), - Temperature: cfg.Metadata.Temperature, - Headers: cfg.Ollama.RequestHeaders, - HTTPClient: httpClient, - Log: log, - Dimensions: cfg.Embeddings.Dimensions, - LogConversations: cfg.Metadata.LogConversations, - }), nil -} diff --git a/internal/ai/openrouter/client.go b/internal/ai/openrouter/client.go deleted file mode 100644 index b2fe6d0..0000000 --- a/internal/ai/openrouter/client.go +++ /dev/null @@ -1,37 +0,0 @@ -package openrouter - -import ( - "log/slog" - "net/http" - - "git.warky.dev/wdevs/amcs/internal/ai/compat" - "git.warky.dev/wdevs/amcs/internal/config" -) - -func New(cfg config.AIConfig, httpClient *http.Client, log *slog.Logger) (*compat.Client, error) { - headers := make(map[string]string, len(cfg.OpenRouter.ExtraHeaders)+2) - for key, value := range 
cfg.OpenRouter.ExtraHeaders { - headers[key] = value - } - if cfg.OpenRouter.SiteURL != "" { - headers["HTTP-Referer"] = cfg.OpenRouter.SiteURL - } - if cfg.OpenRouter.AppName != "" { - headers["X-Title"] = cfg.OpenRouter.AppName - } - - return compat.New(compat.Config{ - Name: "openrouter", - BaseURL: cfg.OpenRouter.BaseURL, - APIKey: cfg.OpenRouter.APIKey, - EmbeddingModel: cfg.Embeddings.Model, - MetadataModel: cfg.Metadata.Model, - FallbackMetadataModels: cfg.Metadata.EffectiveFallbackModels(), - Temperature: cfg.Metadata.Temperature, - Headers: headers, - HTTPClient: httpClient, - Log: log, - Dimensions: cfg.Embeddings.Dimensions, - LogConversations: cfg.Metadata.LogConversations, - }), nil -} diff --git a/internal/ai/provider.go b/internal/ai/provider.go deleted file mode 100644 index e547757..0000000 --- a/internal/ai/provider.go +++ /dev/null @@ -1,15 +0,0 @@ -package ai - -import ( - "context" - - thoughttypes "git.warky.dev/wdevs/amcs/internal/types" -) - -type Provider interface { - Embed(ctx context.Context, input string) ([]float32, error) - ExtractMetadata(ctx context.Context, input string) (thoughttypes.ThoughtMetadata, error) - Summarize(ctx context.Context, systemPrompt, userPrompt string) (string, error) - Name() string - EmbeddingModel() string -} diff --git a/internal/ai/registry.go b/internal/ai/registry.go new file mode 100644 index 0000000..e82b724 --- /dev/null +++ b/internal/ai/registry.go @@ -0,0 +1,96 @@ +package ai + +import ( + "fmt" + "log/slog" + "net/http" + "strings" + + "git.warky.dev/wdevs/amcs/internal/ai/compat" + "git.warky.dev/wdevs/amcs/internal/config" +) + +// Registry holds one compat.Client per named provider. Runners look up clients +// by provider name when walking a role chain. +type Registry struct { + clients map[string]*compat.Client +} + +// NewRegistry builds a Registry from the configured providers. Each provider +// type maps onto a compat.Client with type-specific header plumbing (e.g. 
+// openrouter's HTTP-Referer / X-Title). +func NewRegistry(providers map[string]config.ProviderConfig, httpClient *http.Client, log *slog.Logger) (*Registry, error) { + if httpClient == nil { + return nil, fmt.Errorf("ai registry: http client is required") + } + if len(providers) == 0 { + return nil, fmt.Errorf("ai registry: no providers configured") + } + + clients := make(map[string]*compat.Client, len(providers)) + for name, p := range providers { + headers, err := providerHeaders(p) + if err != nil { + return nil, fmt.Errorf("ai registry: provider %q: %w", name, err) + } + clients[name] = compat.New(compat.Config{ + Name: name, + BaseURL: p.BaseURL, + APIKey: p.APIKey, + Headers: headers, + HTTPClient: httpClient, + Log: log, + }) + } + return &Registry{clients: clients}, nil +} + +// Client returns the compat.Client registered under name. +func (r *Registry) Client(name string) (*compat.Client, error) { + c, ok := r.clients[name] + if !ok { + return nil, fmt.Errorf("ai registry: provider %q is not configured", name) + } + return c, nil +} + +// Names returns the registered provider names. 
+func (r *Registry) Names() []string { + names := make([]string, 0, len(r.clients)) + for name := range r.clients { + names = append(names, name) + } + return names +} + +func providerHeaders(p config.ProviderConfig) (map[string]string, error) { + switch p.Type { + case "litellm", "ollama": + return cloneHeaders(p.RequestHeaders), nil + case "openrouter": + headers := cloneHeaders(p.RequestHeaders) + if headers == nil { + headers = map[string]string{} + } + if s := strings.TrimSpace(p.SiteURL); s != "" { + headers["HTTP-Referer"] = s + } + if s := strings.TrimSpace(p.AppName); s != "" { + headers["X-Title"] = s + } + return headers, nil + default: + return nil, fmt.Errorf("unsupported provider type %q", p.Type) + } +} + +func cloneHeaders(in map[string]string) map[string]string { + if len(in) == 0 { + return nil + } + out := make(map[string]string, len(in)) + for k, v := range in { + out[k] = v + } + return out +} diff --git a/internal/ai/registry_test.go b/internal/ai/registry_test.go new file mode 100644 index 0000000..eaf16fd --- /dev/null +++ b/internal/ai/registry_test.go @@ -0,0 +1,80 @@ +package ai + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + "git.warky.dev/wdevs/amcs/internal/ai/compat" + "git.warky.dev/wdevs/amcs/internal/config" +) + +func TestNewRegistryOpenRouterHeaders(t *testing.T) { + var ( + gotReferer string + gotTitle string + gotCustom string + ) + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + gotReferer = r.Header.Get("HTTP-Referer") + gotTitle = r.Header.Get("X-Title") + gotCustom = r.Header.Get("X-Custom") + _ = json.NewEncoder(w).Encode(map[string]any{ + "choices": []map[string]any{{"message": map[string]any{"role": "assistant", "content": "ok"}}}, + }) + })) + defer srv.Close() + + providers := map[string]config.ProviderConfig{ + "router": { + Type: "openrouter", + BaseURL: srv.URL, + APIKey: "secret", + RequestHeaders: map[string]string{ + 
"X-Custom": "value", + }, + AppName: "amcs", + SiteURL: "https://example.com", + }, + } + + reg, err := NewRegistry(providers, srv.Client(), nil) + if err != nil { + t.Fatalf("NewRegistry() error = %v", err) + } + + client, err := reg.Client("router") + if err != nil { + t.Fatalf("Client(router) error = %v", err) + } + + if _, err := client.SummarizeWith(context.Background(), compat.SummarizeOptions{Model: "gpt-4.1-mini"}, "system", "user"); err != nil { + t.Fatalf("SummarizeWith() error = %v", err) + } + if gotReferer != "https://example.com" { + t.Fatalf("HTTP-Referer = %q, want https://example.com", gotReferer) + } + if gotTitle != "amcs" { + t.Fatalf("X-Title = %q, want amcs", gotTitle) + } + if gotCustom != "value" { + t.Fatalf("X-Custom = %q, want value", gotCustom) + } +} + +func TestNewRegistryRejectsUnsupportedProviderType(t *testing.T) { + providers := map[string]config.ProviderConfig{ + "bad": { + Type: "unknown", + BaseURL: "http://localhost:4000/v1", + APIKey: "secret", + }, + } + + _, err := NewRegistry(providers, &http.Client{}, nil) + if err == nil { + t.Fatal("NewRegistry() error = nil, want unsupported provider type error") + } +} diff --git a/internal/ai/runner.go b/internal/ai/runner.go new file mode 100644 index 0000000..e852bde --- /dev/null +++ b/internal/ai/runner.go @@ -0,0 +1,367 @@ +package ai + +import ( + "context" + "errors" + "fmt" + "log/slog" + "sync" + "time" + + "git.warky.dev/wdevs/amcs/internal/ai/compat" + "git.warky.dev/wdevs/amcs/internal/config" + thoughttypes "git.warky.dev/wdevs/amcs/internal/types" +) + +// Health TTLs per failure class. These are short enough that a healed target +// gets retried without manual intervention, but long enough to avoid hammering +// a broken provider every call. 
+const ( + transientCooldown = 30 * time.Second + permanentCooldown = 10 * time.Minute + emptyResponseThreshold = 3 + emptyResponseCooldown = 2 * time.Minute + dimensionMismatchWarning = "embedding dimension mismatch" +) + +// EmbedResult carries the vector plus the (provider, model) that produced it — +// callers store the actual model so later searches against that row use the +// matching query embedding. +type EmbedResult struct { + Vector []float32 + Provider string + Model string +} + +// EmbeddingRunner executes the embeddings role chain with sequential fallback. +type EmbeddingRunner struct { + registry *Registry + chain []config.RoleTarget + dimensions int + health *healthTracker + log *slog.Logger +} + +// MetadataRunner executes the metadata role chain with sequential fallback and +// a heuristic fallthrough when every target is unhealthy or fails. +type MetadataRunner struct { + registry *Registry + chain []config.RoleTarget + opts metadataRunOpts + health *healthTracker + log *slog.Logger +} + +type metadataRunOpts struct { + temperature float64 + logConversations bool +} + +// NewEmbeddingRunner builds a runner for the embeddings role. chain must be +// non-empty and every target must be registered. 
+func NewEmbeddingRunner(registry *Registry, chain []config.RoleTarget, dimensions int, log *slog.Logger) (*EmbeddingRunner, error) { + if registry == nil { + return nil, fmt.Errorf("embedding runner: registry is required") + } + if len(chain) == 0 { + return nil, fmt.Errorf("embedding runner: chain is empty") + } + if dimensions <= 0 { + return nil, fmt.Errorf("embedding runner: dimensions must be > 0") + } + for i, t := range chain { + if _, err := registry.Client(t.Provider); err != nil { + return nil, fmt.Errorf("embedding runner: chain[%d]: %w", i, err) + } + } + return &EmbeddingRunner{ + registry: registry, + chain: chain, + dimensions: dimensions, + health: newHealthTracker(), + log: log, + }, nil +} + +// NewMetadataRunner builds a runner for the metadata role. +func NewMetadataRunner(registry *Registry, chain []config.RoleTarget, temperature float64, logConversations bool, log *slog.Logger) (*MetadataRunner, error) { + if registry == nil { + return nil, fmt.Errorf("metadata runner: registry is required") + } + if len(chain) == 0 { + return nil, fmt.Errorf("metadata runner: chain is empty") + } + for i, t := range chain { + if _, err := registry.Client(t.Provider); err != nil { + return nil, fmt.Errorf("metadata runner: chain[%d]: %w", i, err) + } + } + return &MetadataRunner{ + registry: registry, + chain: chain, + opts: metadataRunOpts{ + temperature: temperature, + logConversations: logConversations, + }, + health: newHealthTracker(), + log: log, + }, nil +} + +// PrimaryProvider returns the first provider in the chain. +func (r *EmbeddingRunner) PrimaryProvider() string { return r.chain[0].Provider } + +// PrimaryModel returns the first model in the chain — the one used as the +// storage key for search matching. +func (r *EmbeddingRunner) PrimaryModel() string { return r.chain[0].Model } + +// Dimensions returns the required vector dimension. 
+func (r *EmbeddingRunner) Dimensions() int { return r.dimensions } + +// Embed walks the chain and returns the first successful embedding. The +// returned EmbedResult names the actual (provider, model) that produced the +// vector — callers use that when recording the row. +func (r *EmbeddingRunner) Embed(ctx context.Context, input string) (EmbedResult, error) { + var errs []error + for _, target := range r.chain { + if r.health.skip(target) { + continue + } + client, err := r.registry.Client(target.Provider) + if err != nil { + errs = append(errs, err) + continue + } + vec, err := client.EmbedWith(ctx, target.Model, input) + if err != nil { + if ctx.Err() != nil { + return EmbedResult{}, ctx.Err() + } + r.classify(target, err) + r.logFailure("embed", target, err) + errs = append(errs, fmt.Errorf("%s/%s: %w", target.Provider, target.Model, err)) + continue + } + if len(vec) != r.dimensions { + dimErr := fmt.Errorf("%s: expected %d, got %d", dimensionMismatchWarning, r.dimensions, len(vec)) + r.health.markTransient(target) + r.logFailure("embed", target, dimErr) + errs = append(errs, fmt.Errorf("%s/%s: %w", target.Provider, target.Model, dimErr)) + continue + } + r.health.markHealthy(target) + return EmbedResult{Vector: vec, Provider: target.Provider, Model: target.Model}, nil + } + return EmbedResult{}, fmt.Errorf("all embedding targets failed: %w", errors.Join(errs...)) +} + +// EmbedPrimary embeds using only the primary target — used for search queries +// so the query vector matches rows stored under the primary model. Falls back +// to returning the error without walking the chain. 
+func (r *EmbeddingRunner) EmbedPrimary(ctx context.Context, input string) ([]float32, error) { + target := r.chain[0] + client, err := r.registry.Client(target.Provider) + if err != nil { + return nil, err + } + vec, err := client.EmbedWith(ctx, target.Model, input) + if err != nil { + r.classify(target, err) + return nil, err + } + if len(vec) != r.dimensions { + return nil, fmt.Errorf("%s: expected %d, got %d", dimensionMismatchWarning, r.dimensions, len(vec)) + } + r.health.markHealthy(target) + return vec, nil +} + +// PrimaryProvider / PrimaryModel for metadata mirror the embedding runner. +func (r *MetadataRunner) PrimaryProvider() string { return r.chain[0].Provider } +func (r *MetadataRunner) PrimaryModel() string { return r.chain[0].Model } + +// ExtractMetadata walks the chain sequentially. If every target fails or is +// unhealthy, it returns a heuristic metadata so capture never hard-fails. +func (r *MetadataRunner) ExtractMetadata(ctx context.Context, input string) (thoughttypes.ThoughtMetadata, error) { + var errs []error + for _, target := range r.chain { + if r.health.skip(target) { + continue + } + client, err := r.registry.Client(target.Provider) + if err != nil { + errs = append(errs, err) + continue + } + md, err := client.ExtractMetadataWith(ctx, compat.MetadataOptions{ + Model: target.Model, + Temperature: r.opts.temperature, + LogConversations: r.opts.logConversations, + }, input) + if err != nil { + if ctx.Err() != nil { + return thoughttypes.ThoughtMetadata{}, ctx.Err() + } + r.classify(target, err) + r.logFailure("metadata", target, err) + errs = append(errs, fmt.Errorf("%s/%s: %w", target.Provider, target.Model, err)) + continue + } + r.health.markHealthy(target) + return md, nil + } + if r.log != nil { + r.log.Warn("metadata chain exhausted, using heuristic fallback", + slog.Int("targets", len(r.chain)), + slog.String("error", errors.Join(errs...).Error()), + ) + } + return compat.HeuristicMetadataFromInput(input), nil +} + +// 
Summarize walks the chain; unlike metadata, there is no heuristic fallback — +// returns the joined error when everything fails. +func (r *MetadataRunner) Summarize(ctx context.Context, systemPrompt, userPrompt string) (string, error) { + var errs []error + for _, target := range r.chain { + if r.health.skip(target) { + continue + } + client, err := r.registry.Client(target.Provider) + if err != nil { + errs = append(errs, err) + continue + } + out, err := client.SummarizeWith(ctx, compat.SummarizeOptions{ + Model: target.Model, + Temperature: r.opts.temperature, + }, systemPrompt, userPrompt) + if err != nil { + if ctx.Err() != nil { + return "", ctx.Err() + } + r.classify(target, err) + r.logFailure("summarize", target, err) + errs = append(errs, fmt.Errorf("%s/%s: %w", target.Provider, target.Model, err)) + continue + } + r.health.markHealthy(target) + return out, nil + } + return "", fmt.Errorf("all summarize targets failed: %w", errors.Join(errs...)) +} + +func (r *EmbeddingRunner) classify(target config.RoleTarget, err error) { + switch { + case compat.IsPermanentModelError(err): + r.health.markPermanent(target) + default: + r.health.markTransient(target) + } +} + +func (r *MetadataRunner) classify(target config.RoleTarget, err error) { + switch { + case compat.IsPermanentModelError(err): + r.health.markPermanent(target) + case errors.Is(err, compat.ErrEmptyResponse): + r.health.markEmpty(target) + default: + r.health.markTransient(target) + } +} + +func (r *EmbeddingRunner) logFailure(role string, target config.RoleTarget, err error) { + if r.log == nil { + return + } + r.log.Warn("ai target failed", + slog.String("role", role), + slog.String("provider", target.Provider), + slog.String("model", target.Model), + slog.String("error", err.Error()), + ) +} + +func (r *MetadataRunner) logFailure(role string, target config.RoleTarget, err error) { + if r.log == nil { + return + } + r.log.Warn("ai target failed", + slog.String("role", role), + 
slog.String("provider", target.Provider), + slog.String("model", target.Model), + slog.String("error", err.Error()), + ) +} + +// healthTracker records per-(provider, model) failure state. skip returns true +// when a target is still inside its cooldown window; the caller then tries the +// next target in the chain. +type healthTracker struct { + mu sync.Mutex + states map[config.RoleTarget]*healthState +} + +type healthState struct { + unhealthyUntil time.Time + emptyCount int +} + +func newHealthTracker() *healthTracker { + return &healthTracker{states: map[config.RoleTarget]*healthState{}} +} + +func (h *healthTracker) skip(target config.RoleTarget) bool { + h.mu.Lock() + defer h.mu.Unlock() + s, ok := h.states[target] + if !ok { + return false + } + return time.Now().Before(s.unhealthyUntil) +} + +func (h *healthTracker) markTransient(target config.RoleTarget) { + h.setCooldown(target, transientCooldown) +} + +func (h *healthTracker) markPermanent(target config.RoleTarget) { + h.setCooldown(target, permanentCooldown) +} + +func (h *healthTracker) markEmpty(target config.RoleTarget) { + h.mu.Lock() + defer h.mu.Unlock() + s := h.states[target] + if s == nil { + s = &healthState{} + h.states[target] = s + } + s.emptyCount++ + if s.emptyCount >= emptyResponseThreshold { + s.unhealthyUntil = time.Now().Add(emptyResponseCooldown) + s.emptyCount = 0 + } +} + +func (h *healthTracker) markHealthy(target config.RoleTarget) { + h.mu.Lock() + defer h.mu.Unlock() + if s, ok := h.states[target]; ok { + s.unhealthyUntil = time.Time{} + s.emptyCount = 0 + } +} + +func (h *healthTracker) setCooldown(target config.RoleTarget, d time.Duration) { + h.mu.Lock() + defer h.mu.Unlock() + s := h.states[target] + if s == nil { + s = &healthState{} + h.states[target] = s + } + s.unhealthyUntil = time.Now().Add(d) + s.emptyCount = 0 +} diff --git a/internal/ai/runner_test.go b/internal/ai/runner_test.go new file mode 100644 index 0000000..7a148e8 --- /dev/null +++ 
b/internal/ai/runner_test.go @@ -0,0 +1,139 @@ +package ai + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "sync" + "testing" + + "git.warky.dev/wdevs/amcs/internal/config" +) + +func TestEmbeddingRunnerFallsBackAndSkipsUnhealthyPrimary(t *testing.T) { + var ( + mu sync.Mutex + primaryCalls int + ) + + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path != "/embeddings" { + http.NotFound(w, r) + return + } + var req struct { + Model string `json:"model"` + } + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + switch req.Model { + case "embed-primary": + mu.Lock() + primaryCalls++ + mu.Unlock() + http.Error(w, "upstream down", http.StatusBadGateway) + case "embed-fallback": + _ = json.NewEncoder(w).Encode(map[string]any{ + "data": []map[string]any{{"embedding": []float32{0.1, 0.2, 0.3}}}, + }) + default: + http.Error(w, "unknown model", http.StatusBadRequest) + } + })) + defer srv.Close() + + reg, err := NewRegistry(map[string]config.ProviderConfig{ + "p1": {Type: "litellm", BaseURL: srv.URL, APIKey: "k1"}, + "p2": {Type: "litellm", BaseURL: srv.URL, APIKey: "k2"}, + }, srv.Client(), nil) + if err != nil { + t.Fatalf("NewRegistry() error = %v", err) + } + + runner, err := NewEmbeddingRunner(reg, []config.RoleTarget{ + {Provider: "p1", Model: "embed-primary"}, + {Provider: "p2", Model: "embed-fallback"}, + }, 3, nil) + if err != nil { + t.Fatalf("NewEmbeddingRunner() error = %v", err) + } + + res, err := runner.Embed(context.Background(), "hello") + if err != nil { + t.Fatalf("Embed() first call error = %v", err) + } + if res.Provider != "p2" || res.Model != "embed-fallback" { + t.Fatalf("Embed() first call target = %s/%s, want p2/embed-fallback", res.Provider, res.Model) + } + + res, err = runner.Embed(context.Background(), "hello again") + if err != nil { + t.Fatalf("Embed() second call error = %v", err) 
+ } + if res.Provider != "p2" || res.Model != "embed-fallback" { + t.Fatalf("Embed() second call target = %s/%s, want p2/embed-fallback", res.Provider, res.Model) + } + + mu.Lock() + calls := primaryCalls + mu.Unlock() + if calls != 3 { + t.Fatalf("primary calls = %d, want 3 (first request retries 3x; second call should skip unhealthy primary)", calls) + } +} + +func TestMetadataRunnerSummarizeFallsBack(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path != "/chat/completions" { + http.NotFound(w, r) + return + } + var req struct { + Model string `json:"model"` + } + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + switch req.Model { + case "sum-primary": + http.Error(w, "provider error", http.StatusBadGateway) + case "sum-fallback": + _ = json.NewEncoder(w).Encode(map[string]any{ + "choices": []map[string]any{{ + "message": map[string]any{"role": "assistant", "content": "fallback summary"}, + }}, + }) + default: + http.Error(w, "unknown model", http.StatusBadRequest) + } + })) + defer srv.Close() + + reg, err := NewRegistry(map[string]config.ProviderConfig{ + "p1": {Type: "litellm", BaseURL: srv.URL, APIKey: "k1"}, + "p2": {Type: "litellm", BaseURL: srv.URL, APIKey: "k2"}, + }, srv.Client(), nil) + if err != nil { + t.Fatalf("NewRegistry() error = %v", err) + } + + runner, err := NewMetadataRunner(reg, []config.RoleTarget{ + {Provider: "p1", Model: "sum-primary"}, + {Provider: "p2", Model: "sum-fallback"}, + }, 0.1, false, nil) + if err != nil { + t.Fatalf("NewMetadataRunner() error = %v", err) + } + + summary, err := runner.Summarize(context.Background(), "system", "user") + if err != nil { + t.Fatalf("Summarize() error = %v", err) + } + if summary != "fallback summary" { + t.Fatalf("summary = %q, want %q", summary, "fallback summary") + } +} diff --git a/internal/app/app.go b/internal/app/app.go index 
4ae9359..e4471b6 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -34,7 +34,7 @@ func Run(ctx context.Context, configPath string) error { logger.Info("loaded configuration", slog.String("path", loadedFrom), - slog.String("provider", cfg.AI.Provider), + slog.Int("config_version", cfg.Version), slog.String("version", info.Version), slog.String("tag_name", info.TagName), slog.String("build_date", info.BuildDate), @@ -52,11 +52,37 @@ func Run(ctx context.Context, configPath string) error { } httpClient := &http.Client{Timeout: 30 * time.Second} - provider, err := ai.NewProvider(cfg.AI, httpClient, logger) + registry, err := ai.NewRegistry(cfg.AI.Providers, httpClient, logger) if err != nil { return err } + foregroundEmbeddings, err := ai.NewEmbeddingRunner(registry, cfg.AI.Embeddings.Chain(), cfg.AI.Embeddings.Dimensions, logger) + if err != nil { + return err + } + foregroundMetadata, err := ai.NewMetadataRunner(registry, cfg.AI.Metadata.Chain(), cfg.AI.Metadata.Temperature, cfg.AI.Metadata.LogConversations, logger) + if err != nil { + return err + } + + backgroundEmbeddings := foregroundEmbeddings + backgroundMetadata := foregroundMetadata + if cfg.AI.Background != nil { + if cfg.AI.Background.Embeddings != nil { + backgroundEmbeddings, err = ai.NewEmbeddingRunner(registry, cfg.AI.Background.Embeddings.AsTargets(), cfg.AI.Embeddings.Dimensions, logger) + if err != nil { + return err + } + } + if cfg.AI.Background.Metadata != nil { + backgroundMetadata, err = ai.NewMetadataRunner(registry, cfg.AI.Background.Metadata.AsTargets(), cfg.AI.Metadata.Temperature, cfg.AI.Metadata.LogConversations, logger) + if err != nil { + return err + } + } + } + var keyring *auth.Keyring var oauthRegistry *auth.OAuthRegistry var tokenStore *auth.TokenStore @@ -77,12 +103,13 @@ func Run(ctx context.Context, configPath string) error { dynClients := auth.NewDynamicClientStore() activeProjects := session.NewActiveProjects() - logger.Info("database connection verified", - 
slog.String("provider", provider.Name()), + logger.Info("ai providers initialised", + slog.String("embedding_primary", foregroundEmbeddings.PrimaryProvider()+"/"+foregroundEmbeddings.PrimaryModel()), + slog.String("metadata_primary", foregroundMetadata.PrimaryProvider()+"/"+foregroundMetadata.PrimaryModel()), ) if cfg.Backfill.Enabled && cfg.Backfill.RunOnStartup { - go runBackfillPass(ctx, db, provider, cfg.Backfill, logger) + go runBackfillPass(ctx, db, backgroundEmbeddings, cfg.Backfill, logger) } if cfg.Backfill.Enabled && cfg.Backfill.Interval > 0 { @@ -94,14 +121,14 @@ func Run(ctx context.Context, configPath string) error { case <-ctx.Done(): return case <-ticker.C: - runBackfillPass(ctx, db, provider, cfg.Backfill, logger) + runBackfillPass(ctx, db, backgroundEmbeddings, cfg.Backfill, logger) } } }() } if cfg.MetadataRetry.Enabled && cfg.MetadataRetry.RunOnStartup { - go runMetadataRetryPass(ctx, db, provider, cfg, activeProjects, logger) + go runMetadataRetryPass(ctx, db, backgroundMetadata, cfg, activeProjects, logger) } if cfg.MetadataRetry.Enabled && cfg.MetadataRetry.Interval > 0 { @@ -113,13 +140,13 @@ func Run(ctx context.Context, configPath string) error { case <-ctx.Done(): return case <-ticker.C: - runMetadataRetryPass(ctx, db, provider, cfg, activeProjects, logger) + runMetadataRetryPass(ctx, db, backgroundMetadata, cfg, activeProjects, logger) } } }() } - handler, err := routes(logger, cfg, info, db, provider, keyring, oauthRegistry, tokenStore, authCodes, dynClients, activeProjects) + handler, err := routes(logger, cfg, info, db, foregroundEmbeddings, foregroundMetadata, backgroundEmbeddings, backgroundMetadata, keyring, oauthRegistry, tokenStore, authCodes, dynClients, activeProjects) if err != nil { return err } @@ -156,33 +183,33 @@ func Run(ctx context.Context, configPath string) error { } } -func routes(logger *slog.Logger, cfg *config.Config, info buildinfo.Info, db *store.DB, provider ai.Provider, keyring *auth.Keyring, oauthRegistry 
*auth.OAuthRegistry, tokenStore *auth.TokenStore, authCodes *auth.AuthCodeStore, dynClients *auth.DynamicClientStore, activeProjects *session.ActiveProjects) (http.Handler, error) { +func routes(logger *slog.Logger, cfg *config.Config, info buildinfo.Info, db *store.DB, embeddings *ai.EmbeddingRunner, metadata *ai.MetadataRunner, bgEmbeddings *ai.EmbeddingRunner, bgMetadata *ai.MetadataRunner, keyring *auth.Keyring, oauthRegistry *auth.OAuthRegistry, tokenStore *auth.TokenStore, authCodes *auth.AuthCodeStore, dynClients *auth.DynamicClientStore, activeProjects *session.ActiveProjects) (http.Handler, error) { mux := http.NewServeMux() accessTracker := auth.NewAccessTracker() oauthEnabled := oauthRegistry != nil && tokenStore != nil authMiddleware := auth.Middleware(cfg.Auth, keyring, oauthRegistry, tokenStore, accessTracker, logger) filesTool := tools.NewFilesTool(db, activeProjects) - enrichmentRetryer := tools.NewEnrichmentRetryer(context.Background(), db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, logger) - backfillTool := tools.NewBackfillTool(db, provider, activeProjects, logger) + enrichmentRetryer := tools.NewEnrichmentRetryer(context.Background(), db, bgMetadata, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, logger) + backfillTool := tools.NewBackfillTool(db, bgEmbeddings, activeProjects, logger) toolSet := mcpserver.ToolSet{ - Capture: tools.NewCaptureTool(db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, enrichmentRetryer, backfillTool, logger), - Search: tools.NewSearchTool(db, provider, cfg.Search, activeProjects), + Capture: tools.NewCaptureTool(db, embeddings, metadata, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, nil, backfillTool, logger), + Search: tools.NewSearchTool(db, embeddings, cfg.Search, activeProjects), List: tools.NewListTool(db, cfg.Search, activeProjects), Stats: tools.NewStatsTool(db), Get: tools.NewGetTool(db), - Update: tools.NewUpdateTool(db, provider, cfg.Capture, logger), + 
Update: tools.NewUpdateTool(db, embeddings, metadata, cfg.Capture, logger), Delete: tools.NewDeleteTool(db), Archive: tools.NewArchiveTool(db), Projects: tools.NewProjectsTool(db, activeProjects), Version: tools.NewVersionTool(cfg.MCP.ServerName, info), - Context: tools.NewContextTool(db, provider, cfg.Search, activeProjects), - Recall: tools.NewRecallTool(db, provider, cfg.Search, activeProjects), - Summarize: tools.NewSummarizeTool(db, provider, cfg.Search, activeProjects), - Links: tools.NewLinksTool(db, provider, cfg.Search), + Context: tools.NewContextTool(db, embeddings, cfg.Search, activeProjects), + Recall: tools.NewRecallTool(db, embeddings, cfg.Search, activeProjects), + Summarize: tools.NewSummarizeTool(db, embeddings, metadata, cfg.Search, activeProjects), + Links: tools.NewLinksTool(db, embeddings, cfg.Search), Files: filesTool, Backfill: backfillTool, - Reparse: tools.NewReparseMetadataTool(db, provider, cfg.Capture, activeProjects, logger), + Reparse: tools.NewReparseMetadataTool(db, bgMetadata, cfg.Capture, activeProjects, logger), RetryMetadata: tools.NewRetryEnrichmentTool(enrichmentRetryer), Maintenance: tools.NewMaintenanceTool(db), Skills: tools.NewSkillsTool(db, activeProjects), @@ -242,8 +269,8 @@ func routes(logger *slog.Logger, cfg *config.Config, info buildinfo.Info, db *st ), nil } -func runMetadataRetryPass(ctx context.Context, db *store.DB, provider ai.Provider, cfg *config.Config, activeProjects *session.ActiveProjects, logger *slog.Logger) { - retryer := tools.NewMetadataRetryer(ctx, db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, logger) +func runMetadataRetryPass(ctx context.Context, db *store.DB, metadataRunner *ai.MetadataRunner, cfg *config.Config, activeProjects *session.ActiveProjects, logger *slog.Logger) { + retryer := tools.NewMetadataRetryer(ctx, db, metadataRunner, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, logger) _, out, err := retryer.Handle(ctx, nil, tools.RetryMetadataInput{ Limit: 
cfg.MetadataRetry.MaxPerRun, IncludeArchived: cfg.MetadataRetry.IncludeArchived, @@ -261,8 +288,8 @@ func runMetadataRetryPass(ctx context.Context, db *store.DB, provider ai.Provide ) } -func runBackfillPass(ctx context.Context, db *store.DB, provider ai.Provider, cfg config.BackfillConfig, logger *slog.Logger) { - backfiller := tools.NewBackfillTool(db, provider, nil, logger) +func runBackfillPass(ctx context.Context, db *store.DB, embeddings *ai.EmbeddingRunner, cfg config.BackfillConfig, logger *slog.Logger) { + backfiller := tools.NewBackfillTool(db, embeddings, nil, logger) _, out, err := backfiller.Handle(ctx, nil, tools.BackfillInput{ Limit: cfg.MaxPerRun, IncludeArchived: cfg.IncludeArchived, diff --git a/internal/config/config.go b/internal/config/config.go index 46f8daa..281205a 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -8,6 +8,7 @@ const ( ) type Config struct { + Version int `yaml:"version"` Server ServerConfig `yaml:"server"` MCP MCPConfig `yaml:"mcp"` Auth AuthConfig `yaml:"auth"` @@ -37,11 +38,8 @@ type MCPConfig struct { Version string `yaml:"version"` Transport string `yaml:"transport"` SessionTimeout time.Duration `yaml:"session_timeout"` - // PublicURL is the externally reachable base URL of this server (e.g. https://amcs.example.com). - // When set, it is used to build absolute icon URLs in the MCP server identity. - PublicURL string `yaml:"public_url"` - // Instructions is set at startup from the embedded memory.md and sent to MCP clients on initialise. - Instructions string `yaml:"-"` + PublicURL string `yaml:"public_url"` + Instructions string `yaml:"-"` } type AuthConfig struct { @@ -77,52 +75,82 @@ type DatabaseConfig struct { MaxConnIdleTime time.Duration `yaml:"max_conn_idle_time"` } +// AIConfig (v2): named providers + per-role chains. 
type AIConfig struct { - Provider string `yaml:"provider"` - Embeddings AIEmbeddingConfig `yaml:"embeddings"` - Metadata AIMetadataConfig `yaml:"metadata"` - LiteLLM LiteLLMConfig `yaml:"litellm"` - Ollama OllamaConfig `yaml:"ollama"` - OpenRouter OpenRouterAIConfig `yaml:"openrouter"` + Providers map[string]ProviderConfig `yaml:"providers"` + Embeddings EmbeddingsRoleConfig `yaml:"embeddings"` + Metadata MetadataRoleConfig `yaml:"metadata"` + Background *BackgroundRolesConfig `yaml:"background,omitempty"` } -type AIEmbeddingConfig struct { - Model string `yaml:"model"` - Dimensions int `yaml:"dimensions"` +type ProviderConfig struct { + Type string `yaml:"type"` + BaseURL string `yaml:"base_url"` + APIKey string `yaml:"api_key"` + RequestHeaders map[string]string `yaml:"request_headers,omitempty"` + AppName string `yaml:"app_name,omitempty"` + SiteURL string `yaml:"site_url,omitempty"` } -type AIMetadataConfig struct { - Model string `yaml:"model"` - FallbackModels []string `yaml:"fallback_models"` - FallbackModel string `yaml:"fallback_model"` // legacy single fallback +type RoleTarget struct { + Provider string `yaml:"provider"` + Model string `yaml:"model"` +} + +type RoleChain struct { + Primary RoleTarget `yaml:"primary"` + Fallbacks []RoleTarget `yaml:"fallbacks,omitempty"` +} + +type EmbeddingsRoleConfig struct { + Dimensions int `yaml:"dimensions"` + Primary RoleTarget `yaml:"primary"` + Fallbacks []RoleTarget `yaml:"fallbacks,omitempty"` +} + +type MetadataRoleConfig struct { Temperature float64 `yaml:"temperature"` LogConversations bool `yaml:"log_conversations"` Timeout time.Duration `yaml:"timeout"` + Primary RoleTarget `yaml:"primary"` + Fallbacks []RoleTarget `yaml:"fallbacks,omitempty"` } -type LiteLLMConfig struct { - BaseURL string `yaml:"base_url"` - APIKey string `yaml:"api_key"` - UseResponsesAPI bool `yaml:"use_responses_api"` - RequestHeaders map[string]string `yaml:"request_headers"` - EmbeddingModel string `yaml:"embedding_model"` - 
MetadataModel string `yaml:"metadata_model"` - FallbackMetadataModels []string `yaml:"fallback_metadata_models"` - FallbackMetadataModel string `yaml:"fallback_metadata_model"` // legacy single fallback +// BackgroundRolesConfig overrides the foreground chains for background workers +// (backfill_embeddings, metadata_retry, reparse_metadata). Either field may be +// nil to inherit the foreground role unchanged. +type BackgroundRolesConfig struct { + Embeddings *RoleChain `yaml:"embeddings,omitempty"` + Metadata *RoleChain `yaml:"metadata,omitempty"` } -type OllamaConfig struct { - BaseURL string `yaml:"base_url"` - APIKey string `yaml:"api_key"` - RequestHeaders map[string]string `yaml:"request_headers"` +// Chain returns primary followed by fallbacks (deduped, blanks dropped). +func (e EmbeddingsRoleConfig) Chain() []RoleTarget { + return dedupeTargets(append([]RoleTarget{e.Primary}, e.Fallbacks...)) } -type OpenRouterAIConfig struct { - BaseURL string `yaml:"base_url"` - APIKey string `yaml:"api_key"` - AppName string `yaml:"app_name"` - SiteURL string `yaml:"site_url"` - ExtraHeaders map[string]string `yaml:"extra_headers"` +func (m MetadataRoleConfig) Chain() []RoleTarget { + return dedupeTargets(append([]RoleTarget{m.Primary}, m.Fallbacks...)) +} + +func (c RoleChain) AsTargets() []RoleTarget { + return dedupeTargets(append([]RoleTarget{c.Primary}, c.Fallbacks...)) +} + +func dedupeTargets(in []RoleTarget) []RoleTarget { + out := make([]RoleTarget, 0, len(in)) + seen := make(map[RoleTarget]struct{}, len(in)) + for _, t := range in { + if t.Provider == "" || t.Model == "" { + continue + } + if _, ok := seen[t]; ok { + continue + } + seen[t] = struct{}{} + out = append(out, t) + } + return out } type CaptureConfig struct { @@ -167,45 +195,3 @@ type MetadataRetryConfig struct { MaxPerRun int `yaml:"max_per_run"` IncludeArchived bool `yaml:"include_archived"` } - -func (c AIMetadataConfig) EffectiveFallbackModels() []string { - models := make([]string, 0, 
len(c.FallbackModels)+1) - for _, model := range c.FallbackModels { - if model != "" { - models = append(models, model) - } - } - if c.FallbackModel != "" { - models = append(models, c.FallbackModel) - } - return dedupeNonEmpty(models) -} - -func (c LiteLLMConfig) EffectiveFallbackMetadataModels() []string { - models := make([]string, 0, len(c.FallbackMetadataModels)+1) - for _, model := range c.FallbackMetadataModels { - if model != "" { - models = append(models, model) - } - } - if c.FallbackMetadataModel != "" { - models = append(models, c.FallbackMetadataModel) - } - return dedupeNonEmpty(models) -} - -func dedupeNonEmpty(values []string) []string { - seen := make(map[string]struct{}, len(values)) - out := make([]string, 0, len(values)) - for _, value := range values { - if value == "" { - continue - } - if _, ok := seen[value]; ok { - continue - } - seen[value] = struct{}{} - out = append(out, value) - } - return out -} diff --git a/internal/config/loader.go b/internal/config/loader.go index d6b0a88..f133b59 100644 --- a/internal/config/loader.go +++ b/internal/config/loader.go @@ -2,6 +2,7 @@ package config import ( "fmt" + "log/slog" "os" "strconv" "strings" @@ -12,6 +13,12 @@ import ( ) func Load(explicitPath string) (*Config, string, error) { + return LoadWithLogger(explicitPath, nil) +} + +// LoadWithLogger is Load with a logger surface for migration notices. Passing +// nil is fine — migration events will simply not be logged. 
+func LoadWithLogger(explicitPath string, log *slog.Logger) (*Config, string, error) { path := ResolvePath(explicitPath) data, err := os.ReadFile(path) @@ -19,10 +26,40 @@ func Load(explicitPath string) (*Config, string, error) { return nil, path, fmt.Errorf("read config %q: %w", path, err) } - cfg := defaultConfig() - if err := yaml.Unmarshal(data, &cfg); err != nil { + raw := map[string]any{} + if err := yaml.Unmarshal(data, &raw); err != nil { return nil, path, fmt.Errorf("decode config %q: %w", path, err) } + if raw == nil { + raw = map[string]any{} + } + + applied, err := Migrate(raw) + if err != nil { + return nil, path, fmt.Errorf("migrate config %q: %w", path, err) + } + + if len(applied) > 0 { + if err := rewriteConfigFile(path, data, raw); err != nil { + return nil, path, err + } + if log != nil { + for _, step := range applied { + log.Warn("config migrated", + slog.String("path", path), + slog.Int("from_version", step.From), + slog.Int("to_version", step.To), + slog.String("describe", step.Describe), + ) + } + } + } + + cfg, err := decodeTyped(raw) + if err != nil { + return nil, path, fmt.Errorf("decode migrated config %q: %w", path, err) + } + cfg.Version = CurrentConfigVersion applyEnvOverrides(&cfg) if err := cfg.Validate(); err != nil { @@ -32,6 +69,34 @@ func Load(explicitPath string) (*Config, string, error) { return &cfg, path, nil } +func decodeTyped(raw map[string]any) (Config, error) { + out, err := yaml.Marshal(raw) + if err != nil { + return Config{}, fmt.Errorf("re-marshal migrated config: %w", err) + } + cfg := defaultConfig() + if err := yaml.Unmarshal(out, &cfg); err != nil { + return Config{}, err + } + return cfg, nil +} + +func rewriteConfigFile(path string, original []byte, migrated map[string]any) error { + backupPath := fmt.Sprintf("%s.bak.%d", path, time.Now().Unix()) + if err := os.WriteFile(backupPath, original, 0o600); err != nil { + return fmt.Errorf("write backup %q: %w", backupPath, err) + } + + out, err := 
yaml.Marshal(migrated) + if err != nil { + return fmt.Errorf("marshal migrated config: %w", err) + } + if err := os.WriteFile(path, out, 0o600); err != nil { + return fmt.Errorf("write migrated config %q: %w", path, err) + } + return nil +} + func ResolvePath(explicitPath string) string { if path := strings.TrimSpace(explicitPath); path != "" { if path != ".yaml" && path != ".yml" { @@ -49,6 +114,7 @@ func ResolvePath(explicitPath string) string { func defaultConfig() Config { info := buildinfo.Current() return Config{ + Version: CurrentConfigVersion, Server: ServerConfig{ Host: "0.0.0.0", Port: 8080, @@ -69,20 +135,14 @@ func defaultConfig() Config { QueryParam: "key", }, AI: AIConfig{ - Provider: "litellm", - Embeddings: AIEmbeddingConfig{ - Model: "openai/text-embedding-3-small", + Providers: map[string]ProviderConfig{}, + Embeddings: EmbeddingsRoleConfig{ Dimensions: 1536, }, - Metadata: AIMetadataConfig{ - Model: "gpt-4o-mini", + Metadata: MetadataRoleConfig{ Temperature: 0.1, Timeout: 10 * time.Second, }, - Ollama: OllamaConfig{ - BaseURL: "http://localhost:11434/v1", - APIKey: "ollama", - }, }, Capture: CaptureConfig{ Source: DefaultSource, @@ -119,11 +179,12 @@ func defaultConfig() Config { func applyEnvOverrides(cfg *Config) { overrideString(&cfg.Database.URL, "AMCS_DATABASE_URL") overrideString(&cfg.MCP.PublicURL, "AMCS_PUBLIC_URL") - overrideString(&cfg.AI.LiteLLM.BaseURL, "AMCS_LITELLM_BASE_URL") - overrideString(&cfg.AI.LiteLLM.APIKey, "AMCS_LITELLM_API_KEY") - overrideString(&cfg.AI.Ollama.BaseURL, "AMCS_OLLAMA_BASE_URL") - overrideString(&cfg.AI.Ollama.APIKey, "AMCS_OLLAMA_API_KEY") - overrideString(&cfg.AI.OpenRouter.APIKey, "AMCS_OPENROUTER_API_KEY") + + overrideProviderField(cfg, "AMCS_LITELLM_BASE_URL", "litellm", func(p *ProviderConfig, v string) { p.BaseURL = v }) + overrideProviderField(cfg, "AMCS_LITELLM_API_KEY", "litellm", func(p *ProviderConfig, v string) { p.APIKey = v }) + overrideProviderField(cfg, "AMCS_OLLAMA_BASE_URL", "ollama", 
func(p *ProviderConfig, v string) { p.BaseURL = v }) + overrideProviderField(cfg, "AMCS_OLLAMA_API_KEY", "ollama", func(p *ProviderConfig, v string) { p.APIKey = v }) + overrideProviderField(cfg, "AMCS_OPENROUTER_API_KEY", "openrouter", func(p *ProviderConfig, v string) { p.APIKey = v }) if value, ok := os.LookupEnv("AMCS_SERVER_PORT"); ok { if port, err := strconv.Atoi(strings.TrimSpace(value)); err == nil { @@ -132,6 +193,24 @@ func applyEnvOverrides(cfg *Config) { } } +// overrideProviderField applies an env var to every configured provider of the +// given type. This preserves the v1 behaviour where e.g. AMCS_LITELLM_API_KEY +// rewrote the single litellm block — in v2 it rewrites every litellm provider. +func overrideProviderField(cfg *Config, envKey, providerType string, apply func(*ProviderConfig, string)) { + value, ok := os.LookupEnv(envKey) + if !ok { + return + } + value = strings.TrimSpace(value) + for name, p := range cfg.AI.Providers { + if p.Type != providerType { + continue + } + apply(&p, value) + cfg.AI.Providers[name] = p + } +} + func overrideString(target *string, envKey string) { if value, ok := os.LookupEnv(envKey); ok { *target = strings.TrimSpace(value) diff --git a/internal/config/loader_test.go b/internal/config/loader_test.go index ac1025b..c0d685c 100644 --- a/internal/config/loader_test.go +++ b/internal/config/loader_test.go @@ -31,9 +31,8 @@ func TestResolvePathIgnoresBareYAMLExtension(t *testing.T) { } } -func TestLoadAppliesEnvOverrides(t *testing.T) { - configPath := filepath.Join(t.TempDir(), "test.yaml") - if err := os.WriteFile(configPath, []byte(` +const v2ConfigYAML = ` +version: 2 server: port: 8080 mcp: @@ -46,18 +45,30 @@ auth: database: url: "postgres://from-file" ai: - provider: "litellm" + providers: + default: + type: "litellm" + base_url: "http://localhost:4000/v1" + api_key: "file-key" embeddings: dimensions: 1536 - litellm: - base_url: "http://localhost:4000/v1" - api_key: "file-key" + primary: + provider: "default" 
+ model: "text-embed" + metadata: + primary: + provider: "default" + model: "gpt-4" search: default_limit: 10 max_limit: 50 logging: level: "info" -`), 0o600); err != nil { +` + +func TestLoadAppliesEnvOverrides(t *testing.T) { + configPath := filepath.Join(t.TempDir(), "test.yaml") + if err := os.WriteFile(configPath, []byte(v2ConfigYAML), 0o600); err != nil { t.Fatalf("write config: %v", err) } @@ -76,8 +87,8 @@ logging: if cfg.Database.URL != "postgres://from-env" { t.Fatalf("database url = %q, want env override", cfg.Database.URL) } - if cfg.AI.LiteLLM.APIKey != "env-key" { - t.Fatalf("litellm api key = %q, want env override", cfg.AI.LiteLLM.APIKey) + if cfg.AI.Providers["default"].APIKey != "env-key" { + t.Fatalf("litellm api key = %q, want env override", cfg.AI.Providers["default"].APIKey) } if cfg.Server.Port != 9090 { t.Fatalf("server port = %d, want 9090", cfg.Server.Port) @@ -90,10 +101,12 @@ logging: func TestLoadAppliesOllamaEnvOverrides(t *testing.T) { configPath := filepath.Join(t.TempDir(), "test.yaml") if err := os.WriteFile(configPath, []byte(` +version: 2 server: port: 8080 mcp: path: "/mcp" + session_timeout: "10m" auth: keys: - id: "test" @@ -101,15 +114,20 @@ auth: database: url: "postgres://from-file" ai: - provider: "ollama" + providers: + local: + type: "ollama" + base_url: "http://localhost:11434/v1" + api_key: "ollama" embeddings: - model: "nomic-embed-text" dimensions: 768 + primary: + provider: "local" + model: "nomic-embed-text" metadata: - model: "llama3.2" - ollama: - base_url: "http://localhost:11434/v1" - api_key: "ollama" + primary: + provider: "local" + model: "llama3.2" search: default_limit: 10 max_limit: 50 @@ -127,10 +145,77 @@ logging: t.Fatalf("Load() error = %v", err) } - if cfg.AI.Ollama.BaseURL != "https://ollama.example.com/v1" { - t.Fatalf("ollama base url = %q, want env override", cfg.AI.Ollama.BaseURL) + p := cfg.AI.Providers["local"] + if p.BaseURL != "https://ollama.example.com/v1" { + t.Fatalf("ollama base url = 
%q, want env override", p.BaseURL) } - if cfg.AI.Ollama.APIKey != "remote-key" { - t.Fatalf("ollama api key = %q, want env override", cfg.AI.Ollama.APIKey) + if p.APIKey != "remote-key" { + t.Fatalf("ollama api key = %q, want env override", p.APIKey) + } +} + +func TestLoadMigratesV1Config(t *testing.T) { + configPath := filepath.Join(t.TempDir(), "v1.yaml") + v1 := ` +server: + port: 8080 +mcp: + path: "/mcp" + session_timeout: "10m" +auth: + keys: + - id: "test" + value: "secret" +database: + url: "postgres://from-file" +ai: + provider: "litellm" + embeddings: + model: "text-embed" + dimensions: 1536 + metadata: + model: "gpt-4" + temperature: 0.2 + fallback_models: ["gpt-3.5"] + litellm: + base_url: "http://localhost:4000/v1" + api_key: "file-key" +search: + default_limit: 10 + max_limit: 50 +logging: + level: "info" +` + if err := os.WriteFile(configPath, []byte(v1), 0o600); err != nil { + t.Fatalf("write config: %v", err) + } + + cfg, _, err := Load(configPath) + if err != nil { + t.Fatalf("Load() error = %v", err) + } + + if cfg.Version != CurrentConfigVersion { + t.Fatalf("version = %d, want %d", cfg.Version, CurrentConfigVersion) + } + if p, ok := cfg.AI.Providers["default"]; !ok || p.Type != "litellm" || p.APIKey != "file-key" { + t.Fatalf("providers[default] = %+v, want litellm/file-key", p) + } + if cfg.AI.Embeddings.Primary.Model != "text-embed" || cfg.AI.Embeddings.Primary.Provider != "default" { + t.Fatalf("embeddings.primary = %+v, want default/text-embed", cfg.AI.Embeddings.Primary) + } + if cfg.AI.Metadata.Primary.Model != "gpt-4" || cfg.AI.Metadata.Primary.Provider != "default" { + t.Fatalf("metadata.primary = %+v, want default/gpt-4", cfg.AI.Metadata.Primary) + } + if len(cfg.AI.Metadata.Fallbacks) != 1 || cfg.AI.Metadata.Fallbacks[0].Model != "gpt-3.5" { + t.Fatalf("metadata.fallbacks = %+v, want [default/gpt-3.5]", cfg.AI.Metadata.Fallbacks) + } + + entries, err := filepath.Glob(configPath + ".bak.*") + if err != nil { + t.Fatalf("glob backups: 
%v", err) + } + if len(entries) != 1 { + t.Fatalf("backup files = %d, want 1", len(entries)) } } diff --git a/internal/config/migrate.go b/internal/config/migrate.go new file mode 100644 index 0000000..8f88040 --- /dev/null +++ b/internal/config/migrate.go @@ -0,0 +1,341 @@ +package config + +import ( + "fmt" + "sort" +) + +// CurrentConfigVersion is the schema version this binary expects. Files at a +// lower version are migrated automatically when loaded. +const CurrentConfigVersion = 2 + +// ConfigMigration upgrades a raw YAML map by one version. +type ConfigMigration struct { + From, To int + Describe string + Apply func(map[string]any) error +} + +// migrations is the ordered ladder of upgrades. Add new entries at the end. +var migrations = []ConfigMigration{ + {From: 1, To: 2, Describe: "named providers + role chains", Apply: migrateV1toV2}, +} + +// Migrate brings raw up to CurrentConfigVersion in place. Returns the list of +// migrations that were applied (may be empty if already current). 
+func Migrate(raw map[string]any) ([]ConfigMigration, error) { + if raw == nil { + return nil, fmt.Errorf("migrate: raw config is nil") + } + + version := readVersion(raw) + if version > CurrentConfigVersion { + return nil, fmt.Errorf("migrate: config version %d is newer than supported version %d", version, CurrentConfigVersion) + } + + applied := make([]ConfigMigration, 0) + for { + if version >= CurrentConfigVersion { + break + } + step, ok := findMigration(version) + if !ok { + return nil, fmt.Errorf("migrate: no migration registered from version %d", version) + } + if err := step.Apply(raw); err != nil { + return nil, fmt.Errorf("migrate v%d->v%d: %w", step.From, step.To, err) + } + raw["version"] = step.To + version = step.To + applied = append(applied, step) + } + return applied, nil +} + +func findMigration(from int) (ConfigMigration, bool) { + for _, m := range migrations { + if m.From == from { + return m, true + } + } + return ConfigMigration{}, false +} + +// readVersion returns the version from raw. Files without a version field are +// treated as version 1 (the original schema). +func readVersion(raw map[string]any) int { + v, ok := raw["version"] + if !ok { + return 1 + } + switch n := v.(type) { + case int: + return n + case int64: + return int(n) + case float64: + return int(n) + } + return 1 +} + +// migrateV1toV2 lifts the single-provider config into the named-providers + +// role-chains layout. The pre-v2 config implicitly used one provider for both +// embeddings and metadata; we materialise that as a provider named "default". 
+func migrateV1toV2(raw map[string]any) error { + aiRaw := mapValue(raw, "ai") + if aiRaw == nil { + aiRaw = map[string]any{} + } + + providerType := stringValue(aiRaw, "provider") + if providerType == "" { + providerType = "litellm" + } + + providers, embeddingModel, metadataModel, fallbackModels := buildV1Provider(aiRaw, providerType) + + embeddingsOld := mapValue(aiRaw, "embeddings") + dimensions := intValue(embeddingsOld, "dimensions") + if dimensions <= 0 { + dimensions = 1536 + } + if embeddingModel == "" { + embeddingModel = stringValue(embeddingsOld, "model") + } + + metadataOld := mapValue(aiRaw, "metadata") + if metadataModel == "" { + metadataModel = stringValue(metadataOld, "model") + } + temperature := floatValue(metadataOld, "temperature") + logConversations := boolValue(metadataOld, "log_conversations") + timeoutStr := stringValue(metadataOld, "timeout") + + if list := stringListValue(metadataOld, "fallback_models"); len(list) > 0 { + fallbackModels = append(fallbackModels, list...) 
+ } + if v := stringValue(metadataOld, "fallback_model"); v != "" { + fallbackModels = append(fallbackModels, v) + } + + embeddings := map[string]any{ + "dimensions": dimensions, + "primary": map[string]any{"provider": "default", "model": embeddingModel}, + } + + metadata := map[string]any{ + "temperature": temperature, + "log_conversations": logConversations, + "primary": map[string]any{"provider": "default", "model": metadataModel}, + } + if timeoutStr != "" { + metadata["timeout"] = timeoutStr + } + if fallbacks := chainTargets("default", fallbackModels); len(fallbacks) > 0 { + metadata["fallbacks"] = fallbacks + } + + raw["ai"] = map[string]any{ + "providers": providers, + "embeddings": embeddings, + "metadata": metadata, + } + return nil +} + +func buildV1Provider(aiRaw map[string]any, providerType string) (map[string]any, string, string, []string) { + providers := map[string]any{} + defaultEntry := map[string]any{"type": providerType} + embedModel := "" + metaModel := "" + var fallbacks []string + + switch providerType { + case "litellm": + block := mapValue(aiRaw, "litellm") + copyKeys(defaultEntry, block, "base_url", "api_key") + copyHeaders(defaultEntry, block, "request_headers") + embedModel = stringValue(block, "embedding_model") + metaModel = stringValue(block, "metadata_model") + if list := stringListValue(block, "fallback_metadata_models"); len(list) > 0 { + fallbacks = append(fallbacks, list...) 
+ } + if v := stringValue(block, "fallback_metadata_model"); v != "" { + fallbacks = append(fallbacks, v) + } + case "ollama": + block := mapValue(aiRaw, "ollama") + copyKeys(defaultEntry, block, "base_url", "api_key") + copyHeaders(defaultEntry, block, "request_headers") + case "openrouter": + block := mapValue(aiRaw, "openrouter") + copyKeys(defaultEntry, block, "base_url", "api_key", "app_name", "site_url") + copyHeaders(defaultEntry, block, "extra_headers") + // rename: extra_headers → request_headers + if hdr, ok := defaultEntry["extra_headers"]; ok { + defaultEntry["request_headers"] = hdr + delete(defaultEntry, "extra_headers") + } + } + + providers["default"] = defaultEntry + return providers, embedModel, metaModel, fallbacks +} + +func chainTargets(provider string, models []string) []any { + out := make([]any, 0, len(models)) + seen := map[string]struct{}{} + for _, m := range models { + if m == "" { + continue + } + key := provider + "|" + m + if _, ok := seen[key]; ok { + continue + } + seen[key] = struct{}{} + out = append(out, map[string]any{"provider": provider, "model": m}) + } + return out +} + +func mapValue(raw map[string]any, key string) map[string]any { + if raw == nil { + return nil + } + v, ok := raw[key] + if !ok { + return nil + } + switch m := v.(type) { + case map[string]any: + return m + case map[any]any: + return convertAnyMap(m) + } + return nil +} + +func convertAnyMap(in map[any]any) map[string]any { + out := make(map[string]any, len(in)) + keys := make([]string, 0, len(in)) + for k, v := range in { + ks, ok := k.(string) + if !ok { + continue + } + keys = append(keys, ks) + out[ks] = v + } + sort.Strings(keys) + return out +} + +func stringValue(raw map[string]any, key string) string { + if raw == nil { + return "" + } + v, ok := raw[key] + if !ok { + return "" + } + if s, ok := v.(string); ok { + return s + } + return "" +} + +func intValue(raw map[string]any, key string) int { + if raw == nil { + return 0 + } + switch n := 
raw[key].(type) { + case int: + return n + case int64: + return int(n) + case float64: + return int(n) + } + return 0 +} + +func floatValue(raw map[string]any, key string) float64 { + if raw == nil { + return 0 + } + switch n := raw[key].(type) { + case float64: + return n + case int: + return float64(n) + case int64: + return float64(n) + } + return 0 +} + +func boolValue(raw map[string]any, key string) bool { + if raw == nil { + return false + } + if b, ok := raw[key].(bool); ok { + return b + } + return false +} + +func stringListValue(raw map[string]any, key string) []string { + if raw == nil { + return nil + } + v, ok := raw[key] + if !ok { + return nil + } + list, ok := v.([]any) + if !ok { + return nil + } + out := make([]string, 0, len(list)) + for _, item := range list { + if s, ok := item.(string); ok && s != "" { + out = append(out, s) + } + } + return out +} + +func copyKeys(dst, src map[string]any, keys ...string) { + if src == nil { + return + } + for _, k := range keys { + if v, ok := src[k]; ok { + dst[k] = v + } + } +} + +func copyHeaders(dst, src map[string]any, key string) { + if src == nil { + return + } + v, ok := src[key] + if !ok { + return + } + switch headers := v.(type) { + case map[string]any: + if len(headers) == 0 { + return + } + dst[key] = headers + case map[any]any: + if len(headers) == 0 { + return + } + dst[key] = convertAnyMap(headers) + } +} diff --git a/internal/config/migrate_test.go b/internal/config/migrate_test.go new file mode 100644 index 0000000..b3771f6 --- /dev/null +++ b/internal/config/migrate_test.go @@ -0,0 +1,77 @@ +package config + +import "testing" + +func TestMigrateV1ToV2Litellm(t *testing.T) { + raw := map[string]any{ + "ai": map[string]any{ + "provider": "litellm", + "embeddings": map[string]any{ + "model": "text-embedding-3-small", + "dimensions": 1536, + }, + "metadata": map[string]any{ + "model": "gpt-4o-mini", + "temperature": 0.2, + "fallback_models": []any{"gpt-4.1-mini"}, + }, + "litellm": 
map[string]any{ + "base_url": "http://localhost:4000/v1", + "api_key": "secret", + }, + }, + } + + applied, err := Migrate(raw) + if err != nil { + t.Fatalf("Migrate() error = %v", err) + } + if len(applied) != 1 || applied[0].From != 1 || applied[0].To != 2 { + t.Fatalf("applied = %+v, want [v1->v2]", applied) + } + if got := readVersion(raw); got != CurrentConfigVersion { + t.Fatalf("version = %d, want %d", got, CurrentConfigVersion) + } + + ai := mapValue(raw, "ai") + providers := mapValue(ai, "providers") + def := mapValue(providers, "default") + if got := stringValue(def, "type"); got != "litellm" { + t.Fatalf("providers.default.type = %q, want litellm", got) + } + if got := stringValue(def, "base_url"); got != "http://localhost:4000/v1" { + t.Fatalf("providers.default.base_url = %q", got) + } + + emb := mapValue(ai, "embeddings") + embPrimary := mapValue(emb, "primary") + if stringValue(embPrimary, "provider") != "default" || stringValue(embPrimary, "model") != "text-embedding-3-small" { + t.Fatalf("embeddings.primary = %+v, want default/text-embedding-3-small", embPrimary) + } + + meta := mapValue(ai, "metadata") + metaPrimary := mapValue(meta, "primary") + if stringValue(metaPrimary, "provider") != "default" || stringValue(metaPrimary, "model") != "gpt-4o-mini" { + t.Fatalf("metadata.primary = %+v, want default/gpt-4o-mini", metaPrimary) + } + fallbacks, ok := meta["fallbacks"].([]any) + if !ok || len(fallbacks) != 1 { + t.Fatalf("metadata.fallbacks = %#v, want len=1", meta["fallbacks"]) + } + firstFallback, ok := fallbacks[0].(map[string]any) + if !ok { + t.Fatalf("metadata.fallbacks[0] type = %T, want map[string]any", fallbacks[0]) + } + if stringValue(firstFallback, "provider") != "default" || stringValue(firstFallback, "model") != "gpt-4.1-mini" { + t.Fatalf("metadata fallback = %+v, want default/gpt-4.1-mini", firstFallback) + } +} + +func TestMigrateRejectsNewerVersion(t *testing.T) { + raw := map[string]any{"version": CurrentConfigVersion + 1} + + _, 
err := Migrate(raw) + if err == nil { + t.Fatal("Migrate() error = nil, want error for newer config version") + } +} diff --git a/internal/config/validate.go b/internal/config/validate.go index af40b37..33a8bd8 100644 --- a/internal/config/validate.go +++ b/internal/config/validate.go @@ -45,38 +45,8 @@ func (c Config) Validate() error { return fmt.Errorf("invalid config: mcp.session_timeout must be greater than zero") } - switch c.AI.Provider { - case "litellm", "ollama", "openrouter": - default: - return fmt.Errorf("invalid config: unsupported ai.provider %q", c.AI.Provider) - } - - if c.AI.Embeddings.Dimensions <= 0 { - return fmt.Errorf("invalid config: ai.embeddings.dimensions must be greater than zero") - } - - switch c.AI.Provider { - case "litellm": - if strings.TrimSpace(c.AI.LiteLLM.BaseURL) == "" { - return fmt.Errorf("invalid config: ai.litellm.base_url is required when ai.provider=litellm") - } - if strings.TrimSpace(c.AI.LiteLLM.APIKey) == "" { - return fmt.Errorf("invalid config: ai.litellm.api_key is required when ai.provider=litellm") - } - case "ollama": - if strings.TrimSpace(c.AI.Ollama.BaseURL) == "" { - return fmt.Errorf("invalid config: ai.ollama.base_url is required when ai.provider=ollama") - } - if strings.TrimSpace(c.AI.Ollama.APIKey) == "" { - return fmt.Errorf("invalid config: ai.ollama.api_key is required when ai.provider=ollama") - } - case "openrouter": - if strings.TrimSpace(c.AI.OpenRouter.BaseURL) == "" { - return fmt.Errorf("invalid config: ai.openrouter.base_url is required when ai.provider=openrouter") - } - if strings.TrimSpace(c.AI.OpenRouter.APIKey) == "" { - return fmt.Errorf("invalid config: ai.openrouter.api_key is required when ai.provider=openrouter") - } + if err := c.AI.validate(); err != nil { + return err } if c.Server.Port <= 0 { @@ -108,3 +78,61 @@ func (c Config) Validate() error { return nil } + +func (a AIConfig) validate() error { + if len(a.Providers) == 0 { + return fmt.Errorf("invalid config: ai.providers 
must contain at least one entry") + } + for name, p := range a.Providers { + if strings.TrimSpace(name) == "" { + return fmt.Errorf("invalid config: ai.providers contains an entry with an empty name") + } + switch p.Type { + case "litellm", "ollama", "openrouter": + default: + return fmt.Errorf("invalid config: ai.providers.%s.type %q is not supported", name, p.Type) + } + if strings.TrimSpace(p.BaseURL) == "" { + return fmt.Errorf("invalid config: ai.providers.%s.base_url is required", name) + } + if strings.TrimSpace(p.APIKey) == "" { + return fmt.Errorf("invalid config: ai.providers.%s.api_key is required", name) + } + } + + if a.Embeddings.Dimensions <= 0 { + return fmt.Errorf("invalid config: ai.embeddings.dimensions must be greater than zero") + } + + if err := a.validateChain("ai.embeddings", a.Embeddings.Chain()); err != nil { + return err + } + if err := a.validateChain("ai.metadata", a.Metadata.Chain()); err != nil { + return err + } + if a.Background != nil { + if a.Background.Embeddings != nil { + if err := a.validateChain("ai.background.embeddings", a.Background.Embeddings.AsTargets()); err != nil { + return err + } + } + if a.Background.Metadata != nil { + if err := a.validateChain("ai.background.metadata", a.Background.Metadata.AsTargets()); err != nil { + return err + } + } + } + return nil +} + +func (a AIConfig) validateChain(prefix string, chain []RoleTarget) error { + if len(chain) == 0 { + return fmt.Errorf("invalid config: %s.primary must reference a configured provider and model", prefix) + } + for i, target := range chain { + if _, ok := a.Providers[target.Provider]; !ok { + return fmt.Errorf("invalid config: %s[%d] references unknown provider %q", prefix, i, target.Provider) + } + } + return nil +} diff --git a/internal/config/validate_test.go b/internal/config/validate_test.go index 8cea09c..e0272ea 100644 --- a/internal/config/validate_test.go +++ b/internal/config/validate_test.go @@ -7,28 +7,23 @@ import ( func validConfig() Config { 
return Config{ - Server: ServerConfig{Port: 8080}, - MCP: MCPConfig{Path: "/mcp", SessionTimeout: 10 * time.Minute}, + Version: CurrentConfigVersion, + Server: ServerConfig{Port: 8080}, + MCP: MCPConfig{Path: "/mcp", SessionTimeout: 10 * time.Minute}, Auth: AuthConfig{ Keys: []APIKey{{ID: "test", Value: "secret"}}, }, Database: DatabaseConfig{URL: "postgres://example"}, AI: AIConfig{ - Provider: "litellm", - Embeddings: AIEmbeddingConfig{ + Providers: map[string]ProviderConfig{ + "default": {Type: "litellm", BaseURL: "http://localhost:4000/v1", APIKey: "key"}, + }, + Embeddings: EmbeddingsRoleConfig{ Dimensions: 1536, + Primary: RoleTarget{Provider: "default", Model: "text-embed"}, }, - LiteLLM: LiteLLMConfig{ - BaseURL: "http://localhost:4000/v1", - APIKey: "key", - }, - Ollama: OllamaConfig{ - BaseURL: "http://localhost:11434/v1", - APIKey: "ollama", - }, - OpenRouter: OpenRouterAIConfig{ - BaseURL: "https://openrouter.ai/api/v1", - APIKey: "key", + Metadata: MetadataRoleConfig{ + Primary: RoleTarget{Provider: "default", Model: "gpt-4"}, }, }, Search: SearchConfig{DefaultLimit: 10, MaxLimit: 50}, @@ -36,29 +31,44 @@ func validConfig() Config { } } -func TestValidateAcceptsSupportedProviders(t *testing.T) { - cfg := validConfig() - if err := cfg.Validate(); err != nil { - t.Fatalf("Validate litellm error = %v", err) - } - - cfg.AI.Provider = "ollama" - if err := cfg.Validate(); err != nil { - t.Fatalf("Validate ollama error = %v", err) - } - - cfg.AI.Provider = "openrouter" - if err := cfg.Validate(); err != nil { - t.Fatalf("Validate openrouter error = %v", err) +func TestValidateAcceptsSupportedProviderTypes(t *testing.T) { + for _, providerType := range []string{"litellm", "ollama", "openrouter"} { + cfg := validConfig() + p := cfg.AI.Providers["default"] + p.Type = providerType + cfg.AI.Providers["default"] = p + if err := cfg.Validate(); err != nil { + t.Fatalf("Validate %s error = %v", providerType, err) + } } } -func TestValidateRejectsInvalidProvider(t 
*testing.T) { +func TestValidateRejectsInvalidProviderType(t *testing.T) { cfg := validConfig() - cfg.AI.Provider = "unknown" + p := cfg.AI.Providers["default"] + p.Type = "unknown" + cfg.AI.Providers["default"] = p if err := cfg.Validate(); err == nil { - t.Fatal("Validate() error = nil, want error for unsupported provider") + t.Fatal("Validate() error = nil, want error for unsupported provider type") + } +} + +func TestValidateRejectsChainWithUnknownProvider(t *testing.T) { + cfg := validConfig() + cfg.AI.Metadata.Primary = RoleTarget{Provider: "does-not-exist", Model: "x"} + + if err := cfg.Validate(); err == nil { + t.Fatal("Validate() error = nil, want error for chain referencing unknown provider") + } +} + +func TestValidateRejectsEmptyProviders(t *testing.T) { + cfg := validConfig() + cfg.AI.Providers = map[string]ProviderConfig{} + + if err := cfg.Validate(); err == nil { + t.Fatal("Validate() error = nil, want error for empty providers") } } diff --git a/internal/mcpserver/server.go b/internal/mcpserver/server.go index 8f3426f..c9e55b5 100644 --- a/internal/mcpserver/server.go +++ b/internal/mcpserver/server.go @@ -35,7 +35,7 @@ type ToolSet struct { Files *tools.FilesTool Backfill *tools.BackfillTool Reparse *tools.ReparseMetadataTool - RetryMetadata *tools.RetryMetadataTool + RetryMetadata *tools.RetryEnrichmentTool Maintenance *tools.MaintenanceTool Skills *tools.SkillsTool ChatHistory *tools.ChatHistoryTool diff --git a/internal/mcpserver/streamable_integration_test.go b/internal/mcpserver/streamable_integration_test.go index 699b178..89a82a7 100644 --- a/internal/mcpserver/streamable_integration_test.go +++ b/internal/mcpserver/streamable_integration_test.go @@ -126,7 +126,7 @@ func streamableTestToolSet() ToolSet { Files: new(tools.FilesTool), Backfill: new(tools.BackfillTool), Reparse: new(tools.ReparseMetadataTool), - RetryMetadata: new(tools.RetryMetadataTool), + RetryMetadata: new(tools.RetryEnrichmentTool), Maintenance: 
new(tools.MaintenanceTool), Skills: new(tools.SkillsTool), } diff --git a/internal/tools/backfill.go b/internal/tools/backfill.go index 521a9b7..52d96bc 100644 --- a/internal/tools/backfill.go +++ b/internal/tools/backfill.go @@ -18,10 +18,10 @@ import ( const backfillConcurrency = 4 type BackfillTool struct { - store *store.DB - provider ai.Provider - sessions *session.ActiveProjects - logger *slog.Logger + store *store.DB + embeddings *ai.EmbeddingRunner + sessions *session.ActiveProjects + logger *slog.Logger } type BackfillInput struct { @@ -47,15 +47,15 @@ type BackfillOutput struct { Failures []BackfillFailure `json:"failures,omitempty"` } -func NewBackfillTool(db *store.DB, provider ai.Provider, sessions *session.ActiveProjects, logger *slog.Logger) *BackfillTool { - return &BackfillTool{store: db, provider: provider, sessions: sessions, logger: logger} +func NewBackfillTool(db *store.DB, embeddings *ai.EmbeddingRunner, sessions *session.ActiveProjects, logger *slog.Logger) *BackfillTool { + return &BackfillTool{store: db, embeddings: embeddings, sessions: sessions, logger: logger} } // QueueThought queues a single thought for background embedding generation. // It is used by capture when the embedding provider is temporarily unavailable. 
func (t *BackfillTool) QueueThought(ctx context.Context, id uuid.UUID, content string) { go func() { - vec, err := t.provider.Embed(ctx, content) + result, err := t.embeddings.Embed(ctx, content) if err != nil { t.logger.Warn("background embedding retry failed", slog.String("thought_id", id.String()), @@ -63,15 +63,17 @@ func (t *BackfillTool) QueueThought(ctx context.Context, id uuid.UUID, content s ) return } - model := t.provider.EmbeddingModel() - if err := t.store.UpsertEmbedding(ctx, id, model, vec); err != nil { + if err := t.store.UpsertEmbedding(ctx, id, result.Model, result.Vector); err != nil { t.logger.Warn("background embedding upsert failed", slog.String("thought_id", id.String()), slog.String("error", err.Error()), ) return } - t.logger.Info("background embedding retry succeeded", slog.String("thought_id", id.String())) + t.logger.Info("background embedding retry succeeded", + slog.String("thought_id", id.String()), + slog.String("model", result.Model), + ) }() } @@ -91,15 +93,15 @@ func (t *BackfillTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in projectID = &project.ID } - model := t.provider.EmbeddingModel() + primaryModel := t.embeddings.PrimaryModel() - thoughts, err := t.store.ListThoughtsMissingEmbedding(ctx, model, limit, projectID, in.IncludeArchived, in.OlderThanDays) + thoughts, err := t.store.ListThoughtsMissingEmbedding(ctx, primaryModel, limit, projectID, in.IncludeArchived, in.OlderThanDays) if err != nil { return nil, BackfillOutput{}, err } out := BackfillOutput{ - Model: model, + Model: primaryModel, Scanned: len(thoughts), DryRun: in.DryRun, } @@ -125,7 +127,7 @@ func (t *BackfillTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in defer wg.Done() defer sem.Release(1) - vec, embedErr := t.provider.Embed(ctx, content) + result, embedErr := t.embeddings.Embed(ctx, content) if embedErr != nil { mu.Lock() out.Failures = append(out.Failures, BackfillFailure{ID: id.String(), Error: embedErr.Error()}) @@ -134,7 
+136,7 @@ func (t *BackfillTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in return } - if upsertErr := t.store.UpsertEmbedding(ctx, id, model, vec); upsertErr != nil { + if upsertErr := t.store.UpsertEmbedding(ctx, id, result.Model, result.Vector); upsertErr != nil { mu.Lock() out.Failures = append(out.Failures, BackfillFailure{ID: id.String(), Error: upsertErr.Error()}) mu.Unlock() @@ -154,7 +156,7 @@ func (t *BackfillTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in out.Skipped = out.Scanned - out.Embedded - out.Failed t.logger.Info("backfill completed", - slog.String("model", model), + slog.String("model", primaryModel), slog.Int("scanned", out.Scanned), slog.Int("embedded", out.Embedded), slog.Int("failed", out.Failed), diff --git a/internal/tools/capture.go b/internal/tools/capture.go index eb33ea8..4005410 100644 --- a/internal/tools/capture.go +++ b/internal/tools/capture.go @@ -22,13 +22,20 @@ type EmbeddingQueuer interface { QueueThought(ctx context.Context, id uuid.UUID, content string) } +// MetadataQueuer queues a thought for background metadata retry. Both +// MetadataRetryer and EnrichmentRetryer satisfy this. 
+type MetadataQueuer interface { + QueueThought(id uuid.UUID) +} + type CaptureTool struct { store *store.DB - provider ai.Provider + embeddings *ai.EmbeddingRunner + metadata *ai.MetadataRunner capture config.CaptureConfig sessions *session.ActiveProjects metadataTimeout time.Duration - retryer *MetadataRetryer + retryer MetadataQueuer embedRetryer EmbeddingQueuer log *slog.Logger } @@ -42,8 +49,8 @@ type CaptureOutput struct { Thought thoughttypes.Thought `json:"thought"` } -func NewCaptureTool(db *store.DB, provider ai.Provider, capture config.CaptureConfig, metadataTimeout time.Duration, sessions *session.ActiveProjects, retryer *MetadataRetryer, embedRetryer EmbeddingQueuer, log *slog.Logger) *CaptureTool { - return &CaptureTool{store: db, provider: provider, capture: capture, sessions: sessions, metadataTimeout: metadataTimeout, retryer: retryer, embedRetryer: embedRetryer, log: log} +func NewCaptureTool(db *store.DB, embeddings *ai.EmbeddingRunner, metadata *ai.MetadataRunner, capture config.CaptureConfig, metadataTimeout time.Duration, sessions *session.ActiveProjects, retryer MetadataQueuer, embedRetryer EmbeddingQueuer, log *slog.Logger) *CaptureTool { + return &CaptureTool{store: db, embeddings: embeddings, metadata: metadata, capture: capture, sessions: sessions, metadataTimeout: metadataTimeout, retryer: retryer, embedRetryer: embedRetryer, log: log} } func (t *CaptureTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in CaptureInput) (*mcp.CallToolResult, CaptureOutput, error) { @@ -66,7 +73,7 @@ func (t *CaptureTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in C thought.ProjectID = &project.ID } - created, err := t.store.InsertThought(ctx, thought, t.provider.EmbeddingModel()) + created, err := t.store.InsertThought(ctx, thought, t.embeddings.PrimaryModel()) if err != nil { return nil, CaptureOutput{}, err } @@ -89,7 +96,7 @@ func (t *CaptureTool) launchEnrichment(id uuid.UUID, content string) { if t.retryer != nil { attemptedAt 
:= time.Now().UTC() rawMetadata := metadata.Fallback(t.capture) - extracted, err := t.provider.ExtractMetadata(ctx, content) + extracted, err := t.metadata.ExtractMetadata(ctx, content) if err != nil { failed := metadata.MarkMetadataFailed(rawMetadata, t.capture, attemptedAt, err) if _, updateErr := t.store.UpdateThoughtMetadata(ctx, id, failed); updateErr != nil { @@ -100,7 +107,7 @@ func (t *CaptureTool) launchEnrichment(id uuid.UUID, content string) { } t.log.Warn("deferred metadata extraction failed", slog.String("thought_id", id.String()), - slog.String("provider", t.provider.Name()), + slog.String("provider", t.metadata.PrimaryProvider()), slog.String("error", err.Error()), ) t.retryer.QueueThought(id) @@ -116,10 +123,10 @@ func (t *CaptureTool) launchEnrichment(id uuid.UUID, content string) { } if t.embedRetryer != nil { - if _, err := t.provider.Embed(ctx, content); err != nil { + if _, err := t.embeddings.Embed(ctx, content); err != nil { t.log.Warn("deferred embedding failed", slog.String("thought_id", id.String()), - slog.String("provider", t.provider.Name()), + slog.String("provider", t.embeddings.PrimaryProvider()), slog.String("error", err.Error()), ) } diff --git a/internal/tools/context.go b/internal/tools/context.go index e65168e..ffc449e 100644 --- a/internal/tools/context.go +++ b/internal/tools/context.go @@ -15,10 +15,10 @@ import ( ) type ContextTool struct { - store *store.DB - provider ai.Provider - search config.SearchConfig - sessions *session.ActiveProjects + store *store.DB + embeddings *ai.EmbeddingRunner + search config.SearchConfig + sessions *session.ActiveProjects } type ProjectContextInput struct { @@ -41,8 +41,8 @@ type ProjectContextOutput struct { Items []ContextItem `json:"items"` } -func NewContextTool(db *store.DB, provider ai.Provider, search config.SearchConfig, sessions *session.ActiveProjects) *ContextTool { - return &ContextTool{store: db, provider: provider, search: search, sessions: sessions} +func NewContextTool(db 
*store.DB, embeddings *ai.EmbeddingRunner, search config.SearchConfig, sessions *session.ActiveProjects) *ContextTool { + return &ContextTool{store: db, embeddings: embeddings, search: search, sessions: sessions} } func (t *ContextTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in ProjectContextInput) (*mcp.CallToolResult, ProjectContextOutput, error) { @@ -72,7 +72,7 @@ func (t *ContextTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in P query := strings.TrimSpace(in.Query) if query != "" { - semantic, err := semanticSearch(ctx, t.store, t.provider, t.search, query, limit, t.search.DefaultThreshold, &project.ID, nil) + semantic, err := semanticSearch(ctx, t.store, t.embeddings, t.search, query, limit, t.search.DefaultThreshold, &project.ID, nil) if err != nil { return nil, ProjectContextOutput{}, err } diff --git a/internal/tools/enrichment_retry.go b/internal/tools/enrichment_retry.go index 6a3d4d4..ef770b3 100644 --- a/internal/tools/enrichment_retry.go +++ b/internal/tools/enrichment_retry.go @@ -32,7 +32,7 @@ var enrichmentRetryBackoff = []time.Duration{ type EnrichmentRetryer struct { backgroundCtx context.Context store *store.DB - provider ai.Provider + metadata *ai.MetadataRunner capture config.CaptureConfig sessions *session.ActiveProjects metadataTimeout time.Duration @@ -66,14 +66,14 @@ type RetryEnrichmentOutput struct { Failures []RetryEnrichmentFailure `json:"failures,omitempty"` } -func NewEnrichmentRetryer(backgroundCtx context.Context, db *store.DB, provider ai.Provider, capture config.CaptureConfig, metadataTimeout time.Duration, sessions *session.ActiveProjects, logger *slog.Logger) *EnrichmentRetryer { +func NewEnrichmentRetryer(backgroundCtx context.Context, db *store.DB, metadataRunner *ai.MetadataRunner, capture config.CaptureConfig, metadataTimeout time.Duration, sessions *session.ActiveProjects, logger *slog.Logger) *EnrichmentRetryer { if backgroundCtx == nil { backgroundCtx = context.Background() } return 
&EnrichmentRetryer{ backgroundCtx: backgroundCtx, store: db, - provider: provider, + metadata: metadataRunner, capture: capture, sessions: sessions, metadataTimeout: metadataTimeout, @@ -190,7 +190,7 @@ func (r *EnrichmentRetryer) retryOne(ctx context.Context, id uuid.UUID) (bool, e } attemptedAt := time.Now().UTC() - extracted, extractErr := r.provider.ExtractMetadata(attemptCtx, thought.Content) + extracted, extractErr := r.metadata.ExtractMetadata(attemptCtx, thought.Content) if extractErr != nil { failedMetadata := metadata.MarkMetadataFailed(thought.Metadata, r.capture, attemptedAt, extractErr) if _, updateErr := r.store.UpdateThoughtMetadata(ctx, thought.ID, failedMetadata); updateErr != nil { diff --git a/internal/tools/links.go b/internal/tools/links.go index 1cde072..a61d501 100644 --- a/internal/tools/links.go +++ b/internal/tools/links.go @@ -13,9 +13,9 @@ import ( ) type LinksTool struct { - store *store.DB - provider ai.Provider - search config.SearchConfig + store *store.DB + embeddings *ai.EmbeddingRunner + search config.SearchConfig } type LinkInput struct { @@ -47,8 +47,8 @@ type RelatedOutput struct { Related []RelatedThought `json:"related"` } -func NewLinksTool(db *store.DB, provider ai.Provider, search config.SearchConfig) *LinksTool { - return &LinksTool{store: db, provider: provider, search: search} +func NewLinksTool(db *store.DB, embeddings *ai.EmbeddingRunner, search config.SearchConfig) *LinksTool { + return &LinksTool{store: db, embeddings: embeddings, search: search} } func (t *LinksTool) Link(ctx context.Context, _ *mcp.CallToolRequest, in LinkInput) (*mcp.CallToolResult, LinkOutput, error) { @@ -117,7 +117,7 @@ func (t *LinksTool) Related(ctx context.Context, _ *mcp.CallToolRequest, in Rela } if includeSemantic { - semantic, err := semanticSearch(ctx, t.store, t.provider, t.search, thought.Content, t.search.DefaultLimit, t.search.DefaultThreshold, thought.ProjectID, &thought.ID) + semantic, err := semanticSearch(ctx, t.store, 
t.embeddings, t.search, thought.Content, t.search.DefaultLimit, t.search.DefaultThreshold, thought.ProjectID, &thought.ID) if err != nil { return nil, RelatedOutput{}, err } diff --git a/internal/tools/metadata_retry.go b/internal/tools/metadata_retry.go index ceb2268..06fa321 100644 --- a/internal/tools/metadata_retry.go +++ b/internal/tools/metadata_retry.go @@ -23,7 +23,7 @@ const metadataRetryConcurrency = 4 type MetadataRetryer struct { backgroundCtx context.Context store *store.DB - provider ai.Provider + metadata *ai.MetadataRunner capture config.CaptureConfig sessions *session.ActiveProjects metadataTimeout time.Duration @@ -87,14 +87,14 @@ type RetryMetadataOutput struct { Failures []RetryMetadataFailure `json:"failures,omitempty"` } -func NewMetadataRetryer(backgroundCtx context.Context, db *store.DB, provider ai.Provider, capture config.CaptureConfig, metadataTimeout time.Duration, sessions *session.ActiveProjects, logger *slog.Logger) *MetadataRetryer { +func NewMetadataRetryer(backgroundCtx context.Context, db *store.DB, metadataRunner *ai.MetadataRunner, capture config.CaptureConfig, metadataTimeout time.Duration, sessions *session.ActiveProjects, logger *slog.Logger) *MetadataRetryer { if backgroundCtx == nil { backgroundCtx = context.Background() } return &MetadataRetryer{ backgroundCtx: backgroundCtx, store: db, - provider: provider, + metadata: metadataRunner, capture: capture, sessions: sessions, metadataTimeout: metadataTimeout, @@ -223,7 +223,7 @@ func (r *MetadataRetryer) retryOne(ctx context.Context, id uuid.UUID) (bool, err } attemptedAt := time.Now().UTC() - extracted, extractErr := r.provider.ExtractMetadata(attemptCtx, thought.Content) + extracted, extractErr := r.metadata.ExtractMetadata(attemptCtx, thought.Content) if extractErr != nil { failedMetadata := metadata.MarkMetadataFailed(thought.Metadata, r.capture, attemptedAt, extractErr) if _, updateErr := r.store.UpdateThoughtMetadata(ctx, thought.ID, failedMetadata); updateErr != nil { 
diff --git a/internal/tools/recall.go b/internal/tools/recall.go index fa824b0..16fb784 100644 --- a/internal/tools/recall.go +++ b/internal/tools/recall.go @@ -15,10 +15,10 @@ import ( ) type RecallTool struct { - store *store.DB - provider ai.Provider - search config.SearchConfig - sessions *session.ActiveProjects + store *store.DB + embeddings *ai.EmbeddingRunner + search config.SearchConfig + sessions *session.ActiveProjects } type RecallInput struct { @@ -32,8 +32,8 @@ type RecallOutput struct { Items []ContextItem `json:"items"` } -func NewRecallTool(db *store.DB, provider ai.Provider, search config.SearchConfig, sessions *session.ActiveProjects) *RecallTool { - return &RecallTool{store: db, provider: provider, search: search, sessions: sessions} +func NewRecallTool(db *store.DB, embeddings *ai.EmbeddingRunner, search config.SearchConfig, sessions *session.ActiveProjects) *RecallTool { + return &RecallTool{store: db, embeddings: embeddings, search: search, sessions: sessions} } func (t *RecallTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in RecallInput) (*mcp.CallToolResult, RecallOutput, error) { @@ -54,7 +54,7 @@ func (t *RecallTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in Re projectID = &project.ID } - semantic, err := semanticSearch(ctx, t.store, t.provider, t.search, query, limit, t.search.DefaultThreshold, projectID, nil) + semantic, err := semanticSearch(ctx, t.store, t.embeddings, t.search, query, limit, t.search.DefaultThreshold, projectID, nil) if err != nil { return nil, RecallOutput{}, err } diff --git a/internal/tools/reparse_metadata.go b/internal/tools/reparse_metadata.go index a7d5bb2..7b5e31c 100644 --- a/internal/tools/reparse_metadata.go +++ b/internal/tools/reparse_metadata.go @@ -23,7 +23,7 @@ const metadataReparseConcurrency = 4 type ReparseMetadataTool struct { store *store.DB - provider ai.Provider + metadata *ai.MetadataRunner capture config.CaptureConfig sessions *session.ActiveProjects logger 
*slog.Logger @@ -53,8 +53,8 @@ type ReparseMetadataOutput struct { Failures []ReparseMetadataFailure `json:"failures,omitempty"` } -func NewReparseMetadataTool(db *store.DB, provider ai.Provider, capture config.CaptureConfig, sessions *session.ActiveProjects, logger *slog.Logger) *ReparseMetadataTool { - return &ReparseMetadataTool{store: db, provider: provider, capture: capture, sessions: sessions, logger: logger} +func NewReparseMetadataTool(db *store.DB, metadataRunner *ai.MetadataRunner, capture config.CaptureConfig, sessions *session.ActiveProjects, logger *slog.Logger) *ReparseMetadataTool { + return &ReparseMetadataTool{store: db, metadata: metadataRunner, capture: capture, sessions: sessions, logger: logger} } func (t *ReparseMetadataTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in ReparseMetadataInput) (*mcp.CallToolResult, ReparseMetadataOutput, error) { @@ -107,7 +107,7 @@ func (t *ReparseMetadataTool) Handle(ctx context.Context, req *mcp.CallToolReque normalizedCurrent := metadata.Normalize(thought.Metadata, t.capture) attemptedAt := time.Now().UTC() - extracted, extractErr := t.provider.ExtractMetadata(ctx, thought.Content) + extracted, extractErr := t.metadata.ExtractMetadata(ctx, thought.Content) normalizedTarget := normalizedCurrent if extractErr != nil { normalizedTarget = metadata.MarkMetadataFailed(normalizedCurrent, t.capture, attemptedAt, extractErr) diff --git a/internal/tools/retrieval.go b/internal/tools/retrieval.go index f937e74..5d51c18 100644 --- a/internal/tools/retrieval.go +++ b/internal/tools/retrieval.go @@ -11,12 +11,14 @@ import ( thoughttypes "git.warky.dev/wdevs/amcs/internal/types" ) -// semanticSearch runs vector similarity search if embeddings exist for the active model -// in the given scope, otherwise falls back to Postgres full-text search. 
+// semanticSearch runs vector similarity search if embeddings exist for the +// primary embedding model in the given scope, otherwise falls back to Postgres +// full-text search. Search always uses the primary model so query vectors +// match rows stored under the primary model name. func semanticSearch( ctx context.Context, db *store.DB, - provider ai.Provider, + embeddings *ai.EmbeddingRunner, search config.SearchConfig, query string, limit int, @@ -24,17 +26,18 @@ func semanticSearch( projectID *uuid.UUID, excludeID *uuid.UUID, ) ([]thoughttypes.SearchResult, error) { - hasEmbeddings, err := db.HasEmbeddingsForModel(ctx, provider.EmbeddingModel(), projectID) + model := embeddings.PrimaryModel() + hasEmbeddings, err := db.HasEmbeddingsForModel(ctx, model, projectID) if err != nil { return nil, err } if hasEmbeddings { - embedding, err := provider.Embed(ctx, query) + embedding, err := embeddings.EmbedPrimary(ctx, query) if err != nil { return nil, err } - return db.SearchSimilarThoughts(ctx, embedding, provider.EmbeddingModel(), threshold, limit, projectID, excludeID) + return db.SearchSimilarThoughts(ctx, embedding, model, threshold, limit, projectID, excludeID) } return db.SearchThoughtsText(ctx, query, limit, projectID, excludeID) diff --git a/internal/tools/search.go b/internal/tools/search.go index db05ab3..6aade33 100644 --- a/internal/tools/search.go +++ b/internal/tools/search.go @@ -15,10 +15,10 @@ import ( ) type SearchTool struct { - store *store.DB - provider ai.Provider - search config.SearchConfig - sessions *session.ActiveProjects + store *store.DB + embeddings *ai.EmbeddingRunner + search config.SearchConfig + sessions *session.ActiveProjects } type SearchInput struct { @@ -32,8 +32,8 @@ type SearchOutput struct { Results []thoughttypes.SearchResult `json:"results"` } -func NewSearchTool(db *store.DB, provider ai.Provider, search config.SearchConfig, sessions *session.ActiveProjects) *SearchTool { - return &SearchTool{store: db, provider: 
provider, search: search, sessions: sessions} +func NewSearchTool(db *store.DB, embeddings *ai.EmbeddingRunner, search config.SearchConfig, sessions *session.ActiveProjects) *SearchTool { + return &SearchTool{store: db, embeddings: embeddings, search: search, sessions: sessions} } func (t *SearchTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in SearchInput) (*mcp.CallToolResult, SearchOutput, error) { @@ -56,7 +56,7 @@ func (t *SearchTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in Se _ = t.store.TouchProject(ctx, project.ID) } - results, err := semanticSearch(ctx, t.store, t.provider, t.search, query, limit, threshold, projectID, nil) + results, err := semanticSearch(ctx, t.store, t.embeddings, t.search, query, limit, threshold, projectID, nil) if err != nil { return nil, SearchOutput{}, err } diff --git a/internal/tools/summarize.go b/internal/tools/summarize.go index 6c60939..3762bce 100644 --- a/internal/tools/summarize.go +++ b/internal/tools/summarize.go @@ -14,10 +14,11 @@ import ( ) type SummarizeTool struct { - store *store.DB - provider ai.Provider - search config.SearchConfig - sessions *session.ActiveProjects + store *store.DB + embeddings *ai.EmbeddingRunner + metadata *ai.MetadataRunner + search config.SearchConfig + sessions *session.ActiveProjects } type SummarizeInput struct { @@ -32,8 +33,8 @@ type SummarizeOutput struct { Count int `json:"count"` } -func NewSummarizeTool(db *store.DB, provider ai.Provider, search config.SearchConfig, sessions *session.ActiveProjects) *SummarizeTool { - return &SummarizeTool{store: db, provider: provider, search: search, sessions: sessions} +func NewSummarizeTool(db *store.DB, embeddings *ai.EmbeddingRunner, metadata *ai.MetadataRunner, search config.SearchConfig, sessions *session.ActiveProjects) *SummarizeTool { + return &SummarizeTool{store: db, embeddings: embeddings, metadata: metadata, search: search, sessions: sessions} } func (t *SummarizeTool) Handle(ctx context.Context, req 
*mcp.CallToolRequest, in SummarizeInput) (*mcp.CallToolResult, SummarizeOutput, error) { @@ -52,7 +53,7 @@ func (t *SummarizeTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in if project != nil { projectID = &project.ID } - results, err := semanticSearch(ctx, t.store, t.provider, t.search, query, limit, t.search.DefaultThreshold, projectID, nil) + results, err := semanticSearch(ctx, t.store, t.embeddings, t.search, query, limit, t.search.DefaultThreshold, projectID, nil) if err != nil { return nil, SummarizeOutput{}, err } @@ -77,7 +78,7 @@ func (t *SummarizeTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in userPrompt := formatContextBlock("Summarize the following thoughts into concise prose with themes, action items, and notable people.", lines) systemPrompt := "You summarize note collections. Be concise, concrete, and structured in plain prose." - summary, err := t.provider.Summarize(ctx, systemPrompt, userPrompt) + summary, err := t.metadata.Summarize(ctx, systemPrompt, userPrompt) if err != nil { return nil, SummarizeOutput{}, err } diff --git a/internal/tools/update.go b/internal/tools/update.go index 0f8a3b5..4865128 100644 --- a/internal/tools/update.go +++ b/internal/tools/update.go @@ -16,10 +16,11 @@ import ( ) type UpdateTool struct { - store *store.DB - provider ai.Provider - capture config.CaptureConfig - log *slog.Logger + store *store.DB + embeddings *ai.EmbeddingRunner + metadata *ai.MetadataRunner + capture config.CaptureConfig + log *slog.Logger } type UpdateInput struct { @@ -33,8 +34,8 @@ type UpdateOutput struct { Thought thoughttypes.Thought `json:"thought"` } -func NewUpdateTool(db *store.DB, provider ai.Provider, capture config.CaptureConfig, log *slog.Logger) *UpdateTool { - return &UpdateTool{store: db, provider: provider, capture: capture, log: log} +func NewUpdateTool(db *store.DB, embeddings *ai.EmbeddingRunner, metadata *ai.MetadataRunner, capture config.CaptureConfig, log *slog.Logger) *UpdateTool { + return 
&UpdateTool{store: db, embeddings: embeddings, metadata: metadata, capture: capture, log: log} } func (t *UpdateTool) Handle(ctx context.Context, _ *mcp.CallToolRequest, in UpdateInput) (*mcp.CallToolResult, UpdateOutput, error) { @@ -50,6 +51,7 @@ func (t *UpdateTool) Handle(ctx context.Context, _ *mcp.CallToolRequest, in Upda content := current.Content var embedding []float32 + embeddingModel := "" mergedMetadata := current.Metadata projectID := current.ProjectID @@ -58,11 +60,13 @@ func (t *UpdateTool) Handle(ctx context.Context, _ *mcp.CallToolRequest, in Upda if content == "" { return nil, UpdateOutput{}, errInvalidInput("content must not be empty") } - embedding, err = t.provider.Embed(ctx, content) + embedResult, err := t.embeddings.Embed(ctx, content) if err != nil { return nil, UpdateOutput{}, err } - extracted, extractErr := t.provider.ExtractMetadata(ctx, content) + embedding = embedResult.Vector + embeddingModel = embedResult.Model + extracted, extractErr := t.metadata.ExtractMetadata(ctx, content) if extractErr != nil { t.log.Warn("metadata extraction failed during update, keeping current metadata", slog.String("error", extractErr.Error())) mergedMetadata = metadata.MarkMetadataFailed(mergedMetadata, t.capture, time.Now().UTC(), extractErr) @@ -82,7 +86,7 @@ func (t *UpdateTool) Handle(ctx context.Context, _ *mcp.CallToolRequest, in Upda projectID = &project.ID } - updated, err := t.store.UpdateThought(ctx, id, content, embedding, t.provider.EmbeddingModel(), mergedMetadata, projectID) + updated, err := t.store.UpdateThought(ctx, id, content, embedding, embeddingModel, mergedMetadata, projectID) if err != nil { return nil, UpdateOutput{}, err } From 7f9c6f122ea9134d686ebd0abd8d372260b63312 Mon Sep 17 00:00:00 2001 From: Hein Date: Tue, 21 Apr 2026 21:18:34 +0200 Subject: [PATCH 05/15] feat(docker): add migrate-config service for database migrations * Include amcs-migrate-config binary in Docker image * Document migration commands in README --- 
Dockerfile | 10 +++++++++- README.md | 16 ++++++++++++++++ changelog.md | 5 +++++ docker-compose.yml | 12 ++++++++++++ 4 files changed, 42 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index c0561a5..c97d16a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -29,7 +29,14 @@ RUN set -eu; \ -X git.warky.dev/wdevs/amcs/internal/buildinfo.TagName=${VERSION_TAG} \ -X git.warky.dev/wdevs/amcs/internal/buildinfo.Commit=${COMMIT_SHA} \ -X git.warky.dev/wdevs/amcs/internal/buildinfo.BuildDate=${BUILD_DATE}" \ - -o /out/amcs-server ./cmd/amcs-server + -o /out/amcs-server ./cmd/amcs-server; \ + CGO_ENABLED=0 GOOS=linux go build -trimpath \ + -ldflags="-s -w \ + -X git.warky.dev/wdevs/amcs/internal/buildinfo.Version=${VERSION_TAG} \ + -X git.warky.dev/wdevs/amcs/internal/buildinfo.TagName=${VERSION_TAG} \ + -X git.warky.dev/wdevs/amcs/internal/buildinfo.Commit=${COMMIT_SHA} \ + -X git.warky.dev/wdevs/amcs/internal/buildinfo.BuildDate=${BUILD_DATE}" \ + -o /out/amcs-migrate-config ./cmd/amcs-migrate-config FROM debian:bookworm-slim @@ -41,6 +48,7 @@ RUN apt-get update \ WORKDIR /app COPY --from=builder /out/amcs-server /app/amcs-server +COPY --from=builder /out/amcs-migrate-config /app/amcs-migrate-config COPY --chown=appuser:appuser configs /app/configs USER appuser diff --git a/README.md b/README.md index 2f626c8..b9219e3 100644 --- a/README.md +++ b/README.md @@ -654,6 +654,22 @@ Notes: - Database migrations `001` through `005` run automatically when the Postgres volume is created for the first time. - `migrations/006_rls_and_grants.sql` is intentionally skipped during container bootstrap because it contains deployment-specific grants for a role named `amcs_user`. +### Run config migration with Compose + +The container image now includes `/app/amcs-migrate-config`. 
+ +Dry-run (prints migrated YAML, does not write files): + +```bash +docker compose --profile tools run --rm migrate-config --config /app/configs/dev.yaml --dry-run +``` + +Apply migration in-place (writes file + creates backup): + +```bash +docker compose --profile tools run --rm migrate-config --config /app/configs/dev.yaml +``` + ## Ollama Set your role targets to an Ollama provider to use a local or self-hosted Ollama server through its OpenAI-compatible API. diff --git a/changelog.md b/changelog.md index 5e64c3a..1a563e8 100644 --- a/changelog.md +++ b/changelog.md @@ -78,3 +78,8 @@ - New: `internal/ai/runner_test.go` - New: `internal/config/migrate.go` - New: `internal/config/migrate_test.go` + +### 2026-04-21 21h - Docker Support for Config Migration CLI +- Added `amcs-migrate-config` binary to the Docker image build output. +- Added `migrate-config` service in `docker-compose.yml` under the `tools` profile. +- Documented compose-based migration commands (dry-run and in-place apply) in the README. diff --git a/docker-compose.yml b/docker-compose.yml index 6d9bcc4..ce57a31 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -36,6 +36,18 @@ services: ports: - "8080:8080" + migrate-config: + build: + context: . 
+ profiles: ["tools"] + restart: "no" + volumes: + - ./configs:/app/configs + environment: + AMCS_CONFIG: /app/configs/docker.yaml + entrypoint: ["/app/amcs-migrate-config"] + command: ["--config", "/app/configs/docker.yaml", "--dry-run"] + volumes: postgres_data: From 55859811be5fb2a37c59619d2506a8a225b0cbe8 Mon Sep 17 00:00:00 2001 From: Hein Date: Tue, 21 Apr 2026 21:31:05 +0200 Subject: [PATCH 06/15] fix(loader): disable config file rewrite during startup * migrate legacy schemas in memory only * log hint to use amcs-migrate-config for persistence --- README.md | 1 + changelog.md | 5 +++++ internal/config/loader.go | 22 ++-------------------- internal/config/loader_test.go | 13 +++++++++++-- 4 files changed, 19 insertions(+), 22 deletions(-) diff --git a/README.md b/README.md index b9219e3..64b2570 100644 --- a/README.md +++ b/README.md @@ -260,6 +260,7 @@ go run ./cmd/amcs-migrate-config --config ./configs/dev.yaml ``` Use `--dry-run` to print migrated YAML without writing. +Server startup migrates older config formats in memory only and does not write files. **OAuth Client Credentials flow**: diff --git a/changelog.md b/changelog.md index 1a563e8..c32709b 100644 --- a/changelog.md +++ b/changelog.md @@ -83,3 +83,8 @@ - Added `amcs-migrate-config` binary to the Docker image build output. - Added `migrate-config` service in `docker-compose.yml` under the `tools` profile. - Documented compose-based migration commands (dry-run and in-place apply) in the README. + +### 2026-04-21 21h - Startup Migration Write Disabled +- Changed config loading to migrate legacy schemas in memory only during startup. +- Removed automatic file rewrite and backup creation from the startup config loader. +- Added loader log hint to use `amcs-migrate-config` when persistent conversion is needed. 
diff --git a/internal/config/loader.go b/internal/config/loader.go index f133b59..bdd5d7c 100644 --- a/internal/config/loader.go +++ b/internal/config/loader.go @@ -40,16 +40,14 @@ func LoadWithLogger(explicitPath string, log *slog.Logger) (*Config, string, err } if len(applied) > 0 { - if err := rewriteConfigFile(path, data, raw); err != nil { - return nil, path, err - } if log != nil { for _, step := range applied { - log.Warn("config migrated", + log.Warn("config migrated in memory", slog.String("path", path), slog.Int("from_version", step.From), slog.Int("to_version", step.To), slog.String("describe", step.Describe), + slog.String("hint", "persist with amcs-migrate-config"), ) } } @@ -81,22 +79,6 @@ func decodeTyped(raw map[string]any) (Config, error) { return cfg, nil } -func rewriteConfigFile(path string, original []byte, migrated map[string]any) error { - backupPath := fmt.Sprintf("%s.bak.%d", path, time.Now().Unix()) - if err := os.WriteFile(backupPath, original, 0o600); err != nil { - return fmt.Errorf("write backup %q: %w", backupPath, err) - } - - out, err := yaml.Marshal(migrated) - if err != nil { - return fmt.Errorf("marshal migrated config: %w", err) - } - if err := os.WriteFile(path, out, 0o600); err != nil { - return fmt.Errorf("write migrated config %q: %w", path, err) - } - return nil -} - func ResolvePath(explicitPath string) string { if path := strings.TrimSpace(explicitPath); path != "" { if path != ".yaml" && path != ".yml" { diff --git a/internal/config/loader_test.go b/internal/config/loader_test.go index c0d685c..1df107a 100644 --- a/internal/config/loader_test.go +++ b/internal/config/loader_test.go @@ -3,6 +3,7 @@ package config import ( "os" "path/filepath" + "strings" "testing" "time" ) @@ -215,7 +216,15 @@ logging: if err != nil { t.Fatalf("glob backups: %v", err) } - if len(entries) != 1 { - t.Fatalf("backup files = %d, want 1", len(entries)) + if len(entries) != 0 { + t.Fatalf("backup files = %d, want 0 (load should not rewrite 
config)", len(entries)) + } + + originalOnDisk, err := os.ReadFile(configPath) + if err != nil { + t.Fatalf("read original config: %v", err) + } + if !strings.Contains(string(originalOnDisk), "provider: \"litellm\"") { + t.Fatalf("expected source config to remain unchanged on disk") } } From 979afc909e5f4651cd48a8da5432a8ba13027b1b Mon Sep 17 00:00:00 2001 From: Hein Date: Tue, 21 Apr 2026 22:00:43 +0200 Subject: [PATCH 07/15] fix(cli): update environment variable handling for server URL --- README.md | 4 ++-- cmd/amcs-cli/cmd/root.go | 5 ++++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 64b2570..131f8e5 100644 --- a/README.md +++ b/README.md @@ -572,7 +572,7 @@ server: https://your-amcs-server token: your-bearer-token ``` -Env vars override the config file: `AMCS_URL`, `AMCS_TOKEN`. Flags `--server` and `--token` override env vars. +Env vars override the config file: `AMCS_SERVER` (preferred), `AMCS_URL` (legacy alias), and `AMCS_TOKEN`. Flags `--server` and `--token` override env vars. 
### stdio MCP client setup @@ -586,7 +586,7 @@ With inline credentials (no config file): ```bash claude mcp add --transport stdio amcs amcs-cli stdio \ - --env AMCS_URL=https://your-amcs-server \ + --env AMCS_SERVER=https://your-amcs-server \ --env AMCS_TOKEN=your-bearer-token ``` diff --git a/cmd/amcs-cli/cmd/root.go b/cmd/amcs-cli/cmd/root.go index 3c49bda..2ff540c 100644 --- a/cmd/amcs-cli/cmd/root.go +++ b/cmd/amcs-cli/cmd/root.go @@ -54,6 +54,9 @@ func loadConfig() error { return err } cfg = loaded + if v := strings.TrimSpace(os.Getenv("AMCS_SERVER")); v != "" { + cfg.Server = v + } if v := strings.TrimSpace(os.Getenv("AMCS_URL")); v != "" { cfg.Server = v } @@ -75,7 +78,7 @@ func loadConfig() error { func requireServer() error { if strings.TrimSpace(cfg.Server) == "" { - return fmt.Errorf("server URL is required; set --server, AMCS_URL, or config server") + return fmt.Errorf("server URL is required; set --server, AMCS_SERVER, AMCS_URL, or config server") } return nil } From 9a9fa4f3841ce1bd31ff49c473c2a1f342daec26 Mon Sep 17 00:00:00 2001 From: Hein Date: Tue, 21 Apr 2026 22:24:57 +0200 Subject: [PATCH 08/15] feat(cli): add verbose logging option for CLI commands * Introduced a new flag `--verbose` to enable detailed logging. * Implemented logging for connection events in SSE and stdio commands. * Added a utility function to handle verbose logging. 
--- cmd/amcs-cli/cmd/root.go | 11 ++++ cmd/amcs-cli/cmd/root_test.go | 31 ++++++++++++ cmd/amcs-cli/cmd/sse.go | 4 ++ cmd/amcs-cli/cmd/stdio.go | 2 + internal/app/app.go | 2 +- internal/app/oauth.go | 5 +- internal/auth/keyring_test.go | 31 ++++++++++++ internal/auth/middleware.go | 12 +++-- internal/observability/http.go | 13 ++--- internal/observability/http_test.go | 23 +++++++++ internal/requestip/requestip.go | 76 ++++++++++++++++++++++++++++ internal/requestip/requestip_test.go | 47 +++++++++++++++++ internal/tools/backfill.go | 23 +++++++-- internal/tools/capture.go | 76 +++++----------------------- internal/tools/enrichment_retry.go | 22 +++++++- internal/tools/metadata_retry.go | 26 +++++++++- 16 files changed, 317 insertions(+), 87 deletions(-) create mode 100644 cmd/amcs-cli/cmd/root_test.go create mode 100644 internal/requestip/requestip.go create mode 100644 internal/requestip/requestip_test.go diff --git a/cmd/amcs-cli/cmd/root.go b/cmd/amcs-cli/cmd/root.go index 2ff540c..e1a69c6 100644 --- a/cmd/amcs-cli/cmd/root.go +++ b/cmd/amcs-cli/cmd/root.go @@ -17,6 +17,7 @@ var ( serverFlag string tokenFlag string outputFlag string + verbose bool cfg Config ) @@ -42,6 +43,7 @@ func init() { rootCmd.PersistentFlags().StringVar(&serverFlag, "server", "", "AMCS server URL") rootCmd.PersistentFlags().StringVar(&tokenFlag, "token", "", "AMCS bearer token") rootCmd.PersistentFlags().StringVar(&outputFlag, "output", "json", "Output format: json or yaml") + rootCmd.PersistentFlags().BoolVar(&verbose, "verbose", false, "Enable verbose logging to stderr") } func loadConfig() error { @@ -122,6 +124,7 @@ func connectRemote(ctx context.Context) (*mcp.ClientSession, error) { if err := requireServer(); err != nil { return nil, err } + verboseLogf("connecting to %s", endpointURL()) client := mcp.NewClient(&mcp.Implementation{Name: "amcs-cli", Version: "0.0.1"}, nil) transport := &mcp.StreamableClientTransport{ Endpoint: endpointURL(), @@ -133,5 +136,13 @@ func 
connectRemote(ctx context.Context) (*mcp.ClientSession, error) { if err != nil { return nil, fmt.Errorf("connect to AMCS server: %w", err) } + verboseLogf("connected to %s", endpointURL()) return session, nil } + +func verboseLogf(format string, args ...any) { + if !verbose { + return + } + _, _ = fmt.Fprintf(os.Stderr, "[amcs-cli] "+format+"\n", args...) +} diff --git a/cmd/amcs-cli/cmd/root_test.go b/cmd/amcs-cli/cmd/root_test.go new file mode 100644 index 0000000..2bb737e --- /dev/null +++ b/cmd/amcs-cli/cmd/root_test.go @@ -0,0 +1,31 @@ +package cmd + +import ( + "net/http" + "net/http/httptest" + "testing" +) + +func TestBearerTransportFormatsBearerToken(t *testing.T) { + const want = "Bearer X" + + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if got := r.Header.Get("Authorization"); got != want { + t.Fatalf("Authorization header = %q, want %q", got, want) + } + w.WriteHeader(http.StatusNoContent) + })) + defer ts.Close() + + client := &http.Client{Transport: &bearerTransport{token: "X"}} + req, err := http.NewRequest(http.MethodGet, ts.URL, nil) + if err != nil { + t.Fatalf("NewRequest() error = %v", err) + } + + res, err := client.Do(req) + if err != nil { + t.Fatalf("client.Do() error = %v", err) + } + _ = res.Body.Close() +} diff --git a/cmd/amcs-cli/cmd/sse.go b/cmd/amcs-cli/cmd/sse.go index 184e5ce..a5a7b47 100644 --- a/cmd/amcs-cli/cmd/sse.go +++ b/cmd/amcs-cli/cmd/sse.go @@ -29,11 +29,13 @@ var sseCmd = &cobra.Command{ connectCtx, cancel := context.WithTimeout(ctx, 30*time.Second) defer cancel() + verboseLogf("connecting to SSE endpoint %s", sseEndpointURL()) remote, err := client.Connect(connectCtx, transport, nil) if err != nil { return fmt.Errorf("connect to AMCS SSE endpoint: %w", err) } defer func() { _ = remote.Close() }() + verboseLogf("connected to SSE endpoint %s", sseEndpointURL()) tools, err := remote.ListTools(ctx, nil) if err != nil { @@ -67,6 +69,8 @@ var sseCmd = &cobra.Command{ return 
fmt.Errorf("start stdio bridge: %w", err) } defer func() { _ = session.Close() }() + verboseLogf("sse stdio bridge ready") + verboseLogf("waiting for MCP commands on stdin") <-ctx.Done() return nil diff --git a/cmd/amcs-cli/cmd/stdio.go b/cmd/amcs-cli/cmd/stdio.go index 2c5eb09..c8c700b 100644 --- a/cmd/amcs-cli/cmd/stdio.go +++ b/cmd/amcs-cli/cmd/stdio.go @@ -51,6 +51,8 @@ var stdioCmd = &cobra.Command{ return fmt.Errorf("start stdio bridge: %w", err) } defer func() { _ = session.Close() }() + verboseLogf("stdio bridge connected to remote AMCS and ready") + verboseLogf("waiting for MCP commands on stdin") <-ctx.Done() return nil diff --git a/internal/app/app.go b/internal/app/app.go index e4471b6..3359c30 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -193,7 +193,7 @@ func routes(logger *slog.Logger, cfg *config.Config, info buildinfo.Info, db *st backfillTool := tools.NewBackfillTool(db, bgEmbeddings, activeProjects, logger) toolSet := mcpserver.ToolSet{ - Capture: tools.NewCaptureTool(db, embeddings, metadata, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, nil, backfillTool, logger), + Capture: tools.NewCaptureTool(db, embeddings, cfg.Capture, activeProjects, enrichmentRetryer, backfillTool), Search: tools.NewSearchTool(db, embeddings, cfg.Search, activeProjects), List: tools.NewListTool(db, cfg.Search, activeProjects), Stats: tools.NewStatsTool(db), diff --git a/internal/app/oauth.go b/internal/app/oauth.go index 8f6e7e8..6a8731b 100644 --- a/internal/app/oauth.go +++ b/internal/app/oauth.go @@ -14,6 +14,7 @@ import ( "time" "git.warky.dev/wdevs/amcs/internal/auth" + "git.warky.dev/wdevs/amcs/internal/requestip" ) // --- JSON types --- @@ -261,7 +262,7 @@ func handleClientCredentials(w http.ResponseWriter, r *http.Request, oauthRegist } keyID, ok := oauthRegistry.Lookup(clientID, clientSecret) if !ok { - log.Warn("oauth token: invalid client credentials", slog.String("remote_addr", r.RemoteAddr)) + log.Warn("oauth token: invalid client 
credentials", slog.String("remote_addr", requestip.FromRequest(r))) w.Header().Set("WWW-Authenticate", `Basic realm="oauth"`) writeTokenError(w, "invalid_client", http.StatusUnauthorized) return @@ -290,7 +291,7 @@ func handleAuthorizationCode(w http.ResponseWriter, r *http.Request, authCodes * return } if !verifyPKCE(codeVerifier, entry.CodeChallenge, entry.CodeChallengeMethod) { - log.Warn("oauth token: PKCE verification failed", slog.String("remote_addr", r.RemoteAddr)) + log.Warn("oauth token: PKCE verification failed", slog.String("remote_addr", requestip.FromRequest(r))) writeTokenError(w, "invalid_grant", http.StatusBadRequest) return } diff --git a/internal/auth/keyring_test.go b/internal/auth/keyring_test.go index c72df71..f8ee5a8 100644 --- a/internal/auth/keyring_test.go +++ b/internal/auth/keyring_test.go @@ -157,3 +157,34 @@ func TestMiddlewareRejectsMissingOrInvalidKey(t *testing.T) { t.Fatalf("invalid key status = %d, want %d", rec.Code, http.StatusUnauthorized) } } + +func TestMiddlewareRecordsForwardedRemoteAddr(t *testing.T) { + keyring, err := NewKeyring([]config.APIKey{{ID: "client-a", Value: "secret"}}) + if err != nil { + t.Fatalf("NewKeyring() error = %v", err) + } + tracker := NewAccessTracker() + + handler := Middleware(config.AuthConfig{HeaderName: "x-brain-key"}, keyring, nil, nil, tracker, testLogger())(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusNoContent) + })) + + req := httptest.NewRequest(http.MethodGet, "/mcp", nil) + req.RemoteAddr = "10.0.0.5:2222" + req.Header.Set("x-brain-key", "secret") + req.Header.Set("X-Real-IP", "203.0.113.99") + rec := httptest.NewRecorder() + handler.ServeHTTP(rec, req) + + if rec.Code != http.StatusNoContent { + t.Fatalf("status = %d, want %d", rec.Code, http.StatusNoContent) + } + + snap := tracker.Snapshot() + if len(snap) != 1 { + t.Fatalf("len(snapshot) = %d, want 1", len(snap)) + } + if snap[0].RemoteAddr != "203.0.113.99" { + t.Fatalf("snapshot 
remote_addr = %q, want %q", snap[0].RemoteAddr, "203.0.113.99") + } +} diff --git a/internal/auth/middleware.go b/internal/auth/middleware.go index 1d075bd..d67ae14 100644 --- a/internal/auth/middleware.go +++ b/internal/auth/middleware.go @@ -9,6 +9,7 @@ import ( "time" "git.warky.dev/wdevs/amcs/internal/config" + "git.warky.dev/wdevs/amcs/internal/requestip" ) type contextKey string @@ -22,17 +23,18 @@ func Middleware(cfg config.AuthConfig, keyring *Keyring, oauthRegistry *OAuthReg } recordAccess := func(r *http.Request, keyID string) { if tracker != nil { - tracker.Record(keyID, r.URL.Path, r.RemoteAddr, r.UserAgent(), time.Now()) + tracker.Record(keyID, r.URL.Path, requestip.FromRequest(r), r.UserAgent(), time.Now()) } } return func(next http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + remoteAddr := requestip.FromRequest(r) // 1. Custom header → keyring only. if keyring != nil { if token := strings.TrimSpace(r.Header.Get(headerName)); token != "" { keyID, ok := keyring.Lookup(token) if !ok { - log.Warn("authentication failed", slog.String("remote_addr", r.RemoteAddr)) + log.Warn("authentication failed", slog.String("remote_addr", remoteAddr)) http.Error(w, "invalid API key", http.StatusUnauthorized) return } @@ -58,7 +60,7 @@ func Middleware(cfg config.AuthConfig, keyring *Keyring, oauthRegistry *OAuthReg return } } - log.Warn("bearer token rejected", slog.String("remote_addr", r.RemoteAddr)) + log.Warn("bearer token rejected", slog.String("remote_addr", remoteAddr)) http.Error(w, "invalid token or API key", http.StatusUnauthorized) return } @@ -71,7 +73,7 @@ func Middleware(cfg config.AuthConfig, keyring *Keyring, oauthRegistry *OAuthReg } keyID, ok := oauthRegistry.Lookup(clientID, clientSecret) if !ok { - log.Warn("oauth client authentication failed", slog.String("remote_addr", r.RemoteAddr)) + log.Warn("oauth client authentication failed", slog.String("remote_addr", remoteAddr)) http.Error(w, "invalid 
OAuth client credentials", http.StatusUnauthorized) return } @@ -85,7 +87,7 @@ func Middleware(cfg config.AuthConfig, keyring *Keyring, oauthRegistry *OAuthReg if token := strings.TrimSpace(r.URL.Query().Get(cfg.QueryParam)); token != "" { keyID, ok := keyring.Lookup(token) if !ok { - log.Warn("authentication failed", slog.String("remote_addr", r.RemoteAddr)) + log.Warn("authentication failed", slog.String("remote_addr", remoteAddr)) http.Error(w, "invalid API key", http.StatusUnauthorized) return } diff --git a/internal/observability/http.go b/internal/observability/http.go index f7d7aae..09aaef0 100644 --- a/internal/observability/http.go +++ b/internal/observability/http.go @@ -3,12 +3,13 @@ package observability import ( "context" "log/slog" - "net" "net/http" "runtime/debug" "time" "github.com/google/uuid" + + "git.warky.dev/wdevs/amcs/internal/requestip" ) type contextKey string @@ -67,7 +68,7 @@ func AccessLog(log *slog.Logger) func(http.Handler) http.Handler { slog.String("path", r.URL.Path), slog.Int("status", recorder.status), slog.Duration("duration", time.Since(started)), - slog.String("remote_addr", stripPort(r.RemoteAddr)), + slog.String("remote_addr", requestip.FromRequest(r)), ) }) } @@ -100,11 +101,3 @@ func (s *statusRecorder) WriteHeader(statusCode int) { s.status = statusCode s.ResponseWriter.WriteHeader(statusCode) } - -func stripPort(remote string) string { - host, _, err := net.SplitHostPort(remote) - if err != nil { - return remote - } - return host -} diff --git a/internal/observability/http_test.go b/internal/observability/http_test.go index 45b524c..fe69358 100644 --- a/internal/observability/http_test.go +++ b/internal/observability/http_test.go @@ -1,10 +1,12 @@ package observability import ( + "bytes" "io" "log/slog" "net/http" "net/http/httptest" + "strings" "testing" "time" ) @@ -57,3 +59,24 @@ func TestRecoverHandlesPanic(t *testing.T) { t.Fatalf("status = %d, want %d", rec.Code, http.StatusInternalServerError) } } + +func 
TestAccessLogUsesForwardedClientIP(t *testing.T) { + var buf bytes.Buffer + logger := slog.New(slog.NewTextHandler(&buf, nil)) + handler := AccessLog(logger)(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusNoContent) + })) + + req := httptest.NewRequest(http.MethodGet, "/mcp", nil) + req.RemoteAddr = "10.0.0.10:1234" + req.Header.Set("X-Real-IP", "203.0.113.7") + rec := httptest.NewRecorder() + handler.ServeHTTP(rec, req) + + if rec.Code != http.StatusNoContent { + t.Fatalf("status = %d, want %d", rec.Code, http.StatusNoContent) + } + if !strings.Contains(buf.String(), "remote_addr=203.0.113.7") { + t.Fatalf("log output = %q, want remote_addr=203.0.113.7", buf.String()) + } +} diff --git a/internal/requestip/requestip.go b/internal/requestip/requestip.go new file mode 100644 index 0000000..ea5ffee --- /dev/null +++ b/internal/requestip/requestip.go @@ -0,0 +1,76 @@ +package requestip + +import ( + "net" + "net/http" + "strings" +) + +// FromRequest returns the best-effort client IP/host for a request, preferring +// proxy headers before falling back to RemoteAddr. +// +// Header precedence: +// 1) X-Real-IP +// 2) X-Forwarded-Host +// 3) X-Forwarded-For (first value) +// 4) Forwarded (for=...) 
+// 5) RemoteAddr (host part) +func FromRequest(r *http.Request) string { + if r == nil { + return "" + } + + if v := firstAddressToken(r.Header.Get("X-Real-IP")); v != "" { + return stripPort(v) + } + if v := firstAddressToken(r.Header.Get("X-Forwarded-Host")); v != "" { + return stripPort(v) + } + if v := firstAddressToken(r.Header.Get("X-Forwarded-For")); v != "" { + return stripPort(v) + } + if v := forwardedForValue(r.Header.Get("Forwarded")); v != "" { + return stripPort(v) + } + return stripPort(strings.TrimSpace(r.RemoteAddr)) +} + +func firstAddressToken(v string) string { + if v == "" { + return "" + } + part := strings.TrimSpace(strings.Split(v, ",")[0]) + part = strings.Trim(part, `"`) + return strings.TrimSpace(part) +} + +func forwardedForValue(v string) string { + for _, part := range strings.Split(v, ",") { + for _, kv := range strings.Split(part, ";") { + k, raw, ok := strings.Cut(strings.TrimSpace(kv), "=") + if !ok || !strings.EqualFold(strings.TrimSpace(k), "for") { + continue + } + candidate := strings.Trim(strings.TrimSpace(raw), `"`) + if candidate == "" { + continue + } + return candidate + } + } + return "" +} + +func stripPort(addr string) string { + addr = strings.TrimSpace(addr) + if addr == "" { + return "" + } + // RFC 7239 quoted values may wrap IPv6 with brackets. 
+ addr = strings.Trim(addr, "[]") + host, _, err := net.SplitHostPort(addr) + if err == nil { + return host + } + return addr +} diff --git a/internal/requestip/requestip_test.go b/internal/requestip/requestip_test.go new file mode 100644 index 0000000..09a15a7 --- /dev/null +++ b/internal/requestip/requestip_test.go @@ -0,0 +1,47 @@ +package requestip + +import ( + "net/http" + "net/http/httptest" + "testing" +) + +func TestFromRequestPrefersXRealIP(t *testing.T) { + req := httptest.NewRequest(http.MethodGet, "/", nil) + req.RemoteAddr = "10.0.0.10:5555" + req.Header.Set("X-Forwarded-Host", "proxy.example.com") + req.Header.Set("X-Real-IP", "203.0.113.10") + + if got := FromRequest(req); got != "203.0.113.10" { + t.Fatalf("FromRequest() = %q, want %q", got, "203.0.113.10") + } +} + +func TestFromRequestUsesXForwardedHostWhenRealIPMissing(t *testing.T) { + req := httptest.NewRequest(http.MethodGet, "/", nil) + req.RemoteAddr = "10.0.0.10:5555" + req.Header.Set("X-Forwarded-Host", "203.0.113.22") + + if got := FromRequest(req); got != "203.0.113.22" { + t.Fatalf("FromRequest() = %q, want %q", got, "203.0.113.22") + } +} + +func TestFromRequestUsesXForwardedForFirstValue(t *testing.T) { + req := httptest.NewRequest(http.MethodGet, "/", nil) + req.RemoteAddr = "10.0.0.10:5555" + req.Header.Set("X-Forwarded-For", "198.51.100.7, 10.1.1.2") + + if got := FromRequest(req); got != "198.51.100.7" { + t.Fatalf("FromRequest() = %q, want %q", got, "198.51.100.7") + } +} + +func TestFromRequestFallsBackToRemoteAddr(t *testing.T) { + req := httptest.NewRequest(http.MethodGet, "/", nil) + req.RemoteAddr = "192.0.2.5:1234" + + if got := FromRequest(req); got != "192.0.2.5" { + t.Fatalf("FromRequest() = %q, want %q", got, "192.0.2.5") + } +} diff --git a/internal/tools/backfill.go b/internal/tools/backfill.go index 52d96bc..6ac06ed 100644 --- a/internal/tools/backfill.go +++ b/internal/tools/backfill.go @@ -55,24 +55,41 @@ func NewBackfillTool(db *store.DB, embeddings 
*ai.EmbeddingRunner, sessions *ses // It is used by capture when the embedding provider is temporarily unavailable. func (t *BackfillTool) QueueThought(ctx context.Context, id uuid.UUID, content string) { go func() { + started := time.Now() + t.logger.Info("background embedding started", + slog.String("thought_id", id.String()), + slog.String("provider", t.embeddings.PrimaryProvider()), + slog.String("model", t.embeddings.PrimaryModel()), + ) + result, err := t.embeddings.Embed(ctx, content) if err != nil { - t.logger.Warn("background embedding retry failed", + t.logger.Warn("background embedding error", slog.String("thought_id", id.String()), + slog.String("provider", t.embeddings.PrimaryProvider()), + slog.String("model", t.embeddings.PrimaryModel()), + slog.String("stage", "embed"), + slog.Duration("duration", time.Since(started)), slog.String("error", err.Error()), ) return } if err := t.store.UpsertEmbedding(ctx, id, result.Model, result.Vector); err != nil { - t.logger.Warn("background embedding upsert failed", + t.logger.Warn("background embedding error", slog.String("thought_id", id.String()), + slog.String("provider", t.embeddings.PrimaryProvider()), + slog.String("model", result.Model), + slog.String("stage", "upsert"), + slog.Duration("duration", time.Since(started)), slog.String("error", err.Error()), ) return } - t.logger.Info("background embedding retry succeeded", + t.logger.Info("background embedding complete", slog.String("thought_id", id.String()), + slog.String("provider", t.embeddings.PrimaryProvider()), slog.String("model", result.Model), + slog.Duration("duration", time.Since(started)), ) }() } diff --git a/internal/tools/capture.go b/internal/tools/capture.go index 4005410..d47950a 100644 --- a/internal/tools/capture.go +++ b/internal/tools/capture.go @@ -2,9 +2,7 @@ package tools import ( "context" - "log/slog" "strings" - "time" "github.com/google/uuid" "github.com/modelcontextprotocol/go-sdk/mcp" @@ -29,15 +27,12 @@ type MetadataQueuer 
interface { } type CaptureTool struct { - store *store.DB - embeddings *ai.EmbeddingRunner - metadata *ai.MetadataRunner - capture config.CaptureConfig - sessions *session.ActiveProjects - metadataTimeout time.Duration - retryer MetadataQueuer - embedRetryer EmbeddingQueuer - log *slog.Logger + store *store.DB + embeddings *ai.EmbeddingRunner + capture config.CaptureConfig + sessions *session.ActiveProjects + retryer MetadataQueuer + embedRetryer EmbeddingQueuer } type CaptureInput struct { @@ -49,8 +44,8 @@ type CaptureOutput struct { Thought thoughttypes.Thought `json:"thought"` } -func NewCaptureTool(db *store.DB, embeddings *ai.EmbeddingRunner, metadata *ai.MetadataRunner, capture config.CaptureConfig, metadataTimeout time.Duration, sessions *session.ActiveProjects, retryer MetadataQueuer, embedRetryer EmbeddingQueuer, log *slog.Logger) *CaptureTool { - return &CaptureTool{store: db, embeddings: embeddings, metadata: metadata, capture: capture, sessions: sessions, metadataTimeout: metadataTimeout, retryer: retryer, embedRetryer: embedRetryer, log: log} +func NewCaptureTool(db *store.DB, embeddings *ai.EmbeddingRunner, capture config.CaptureConfig, sessions *session.ActiveProjects, retryer MetadataQueuer, embedRetryer EmbeddingQueuer) *CaptureTool { + return &CaptureTool{store: db, embeddings: embeddings, capture: capture, sessions: sessions, retryer: retryer, embedRetryer: embedRetryer} } func (t *CaptureTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in CaptureInput) (*mcp.CallToolResult, CaptureOutput, error) { @@ -65,6 +60,7 @@ func (t *CaptureTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in C } rawMetadata := metadata.Fallback(t.capture) + rawMetadata.MetadataStatus = metadata.MetadataStatusPending thought := thoughttypes.Thought{ Content: content, Metadata: rawMetadata, @@ -81,56 +77,12 @@ func (t *CaptureTool) Handle(ctx context.Context, req *mcp.CallToolRequest, in C _ = t.store.TouchProject(ctx, project.ID) } - if t.retryer != 
nil || t.embedRetryer != nil { - t.launchEnrichment(created.ID, content) + if t.retryer != nil { + t.retryer.QueueThought(created.ID) + } + if t.embedRetryer != nil { + t.embedRetryer.QueueThought(ctx, created.ID, content) } return nil, CaptureOutput{Thought: created}, nil } - -func (t *CaptureTool) launchEnrichment(id uuid.UUID, content string) { - go func() { - ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute) - defer cancel() - - if t.retryer != nil { - attemptedAt := time.Now().UTC() - rawMetadata := metadata.Fallback(t.capture) - extracted, err := t.metadata.ExtractMetadata(ctx, content) - if err != nil { - failed := metadata.MarkMetadataFailed(rawMetadata, t.capture, attemptedAt, err) - if _, updateErr := t.store.UpdateThoughtMetadata(ctx, id, failed); updateErr != nil { - t.log.Warn("deferred metadata failure could not be persisted", - slog.String("thought_id", id.String()), - slog.String("error", updateErr.Error()), - ) - } - t.log.Warn("deferred metadata extraction failed", - slog.String("thought_id", id.String()), - slog.String("provider", t.metadata.PrimaryProvider()), - slog.String("error", err.Error()), - ) - t.retryer.QueueThought(id) - } else { - completed := metadata.MarkMetadataComplete(extracted, t.capture, attemptedAt) - if _, updateErr := t.store.UpdateThoughtMetadata(ctx, id, completed); updateErr != nil { - t.log.Warn("deferred metadata completion could not be persisted", - slog.String("thought_id", id.String()), - slog.String("error", updateErr.Error()), - ) - } - } - } - - if t.embedRetryer != nil { - if _, err := t.embeddings.Embed(ctx, content); err != nil { - t.log.Warn("deferred embedding failed", - slog.String("thought_id", id.String()), - slog.String("provider", t.embeddings.PrimaryProvider()), - slog.String("error", err.Error()), - ) - } - t.embedRetryer.QueueThought(ctx, id, content) - } - }() -} diff --git a/internal/tools/enrichment_retry.go b/internal/tools/enrichment_retry.go index ef770b3..a95eaea 100644 
--- a/internal/tools/enrichment_retry.go +++ b/internal/tools/enrichment_retry.go @@ -91,12 +91,30 @@ func (t *RetryEnrichmentTool) Handle(ctx context.Context, req *mcp.CallToolReque func (r *EnrichmentRetryer) QueueThought(id uuid.UUID) { go func() { - if _, err := r.retryOne(r.backgroundCtx, id); err != nil { - r.logger.Warn("background metadata retry failed", + started := time.Now() + r.logger.Info("background metadata started", + slog.String("thought_id", id.String()), + slog.String("provider", r.metadata.PrimaryProvider()), + slog.String("model", r.metadata.PrimaryModel()), + ) + updated, err := r.retryOne(r.backgroundCtx, id) + if err != nil { + r.logger.Warn("background metadata error", slog.String("thought_id", id.String()), + slog.String("provider", r.metadata.PrimaryProvider()), + slog.String("model", r.metadata.PrimaryModel()), + slog.Duration("duration", time.Since(started)), slog.String("error", err.Error()), ) + return } + r.logger.Info("background metadata complete", + slog.String("thought_id", id.String()), + slog.String("provider", r.metadata.PrimaryProvider()), + slog.String("model", r.metadata.PrimaryModel()), + slog.Bool("updated", updated), + slog.Duration("duration", time.Since(started)), + ) }() } diff --git a/internal/tools/metadata_retry.go b/internal/tools/metadata_retry.go index 06fa321..d11c26f 100644 --- a/internal/tools/metadata_retry.go +++ b/internal/tools/metadata_retry.go @@ -113,13 +113,35 @@ func (t *RetryMetadataTool) Handle(ctx context.Context, req *mcp.CallToolRequest func (r *MetadataRetryer) QueueThought(id uuid.UUID) { go func() { + started := time.Now() if !r.lock.Acquire(id, 15*time.Minute) { return } defer r.lock.Release(id) - if _, err := r.retryOne(r.backgroundCtx, id); err != nil { - r.logger.Warn("background metadata retry failed", slog.String("thought_id", id.String()), slog.String("error", err.Error())) + + r.logger.Info("background metadata started", + slog.String("thought_id", id.String()), + 
slog.String("provider", r.metadata.PrimaryProvider()), + slog.String("model", r.metadata.PrimaryModel()), + ) + updated, err := r.retryOne(r.backgroundCtx, id) + if err != nil { + r.logger.Warn("background metadata error", + slog.String("thought_id", id.String()), + slog.String("provider", r.metadata.PrimaryProvider()), + slog.String("model", r.metadata.PrimaryModel()), + slog.Duration("duration", time.Since(started)), + slog.String("error", err.Error()), + ) + return } + r.logger.Info("background metadata complete", + slog.String("thought_id", id.String()), + slog.String("provider", r.metadata.PrimaryProvider()), + slog.String("model", r.metadata.PrimaryModel()), + slog.Bool("updated", updated), + slog.Duration("duration", time.Since(started)), + ) }() } From 512b16f8fe1b01cb62e91ffb11c8bd2644f77591 Mon Sep 17 00:00:00 2001 From: Hein Date: Tue, 21 Apr 2026 22:35:42 +0200 Subject: [PATCH 09/15] feat(observability): add MCP tool name logging in access log * Include tool name from request in access log entries * Update user agent header in HTTP requests * Add tests for MCP tool name logging --- cmd/amcs-cli/cmd/root.go | 5 +++ cmd/amcs-cli/cmd/root_test.go | 4 ++ internal/observability/http.go | 66 +++++++++++++++++++++++++++- internal/observability/http_test.go | 34 ++++++++++++++ internal/requestip/requestip.go | 10 ++--- internal/requestip/requestip_test.go | 12 +---- 6 files changed, 111 insertions(+), 20 deletions(-) diff --git a/cmd/amcs-cli/cmd/root.go b/cmd/amcs-cli/cmd/root.go index e1a69c6..b658b6f 100644 --- a/cmd/amcs-cli/cmd/root.go +++ b/cmd/amcs-cli/cmd/root.go @@ -21,6 +21,8 @@ var ( cfg Config ) +const cliUserAgent = "amcs-cli/0.0.1" + var rootCmd = &cobra.Command{ Use: "amcs-cli", Short: "CLI for connecting to a remote AMCS MCP server", @@ -114,6 +116,9 @@ func (t *bearerTransport) RoundTrip(req *http.Request) (*http.Response, error) { base = http.DefaultTransport } clone := req.Clone(req.Context()) + if 
strings.TrimSpace(clone.Header.Get("User-Agent")) == "" { + clone.Header.Set("User-Agent", cliUserAgent) + } if strings.TrimSpace(t.token) != "" { clone.Header.Set("Authorization", "Bearer "+t.token) } diff --git a/cmd/amcs-cli/cmd/root_test.go b/cmd/amcs-cli/cmd/root_test.go index 2bb737e..16ad0d4 100644 --- a/cmd/amcs-cli/cmd/root_test.go +++ b/cmd/amcs-cli/cmd/root_test.go @@ -8,11 +8,15 @@ import ( func TestBearerTransportFormatsBearerToken(t *testing.T) { const want = "Bearer X" + const wantUA = "amcs-cli/0.0.1" ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if got := r.Header.Get("Authorization"); got != want { t.Fatalf("Authorization header = %q, want %q", got, want) } + if got := r.Header.Get("User-Agent"); got != wantUA { + t.Fatalf("User-Agent header = %q, want %q", got, wantUA) + } w.WriteHeader(http.StatusNoContent) })) defer ts.Close() diff --git a/internal/observability/http.go b/internal/observability/http.go index 09aaef0..e442e9a 100644 --- a/internal/observability/http.go +++ b/internal/observability/http.go @@ -1,10 +1,14 @@ package observability import ( + "bytes" "context" + "encoding/json" + "io" "log/slog" "net/http" "runtime/debug" + "strings" "time" "github.com/google/uuid" @@ -15,6 +19,7 @@ import ( type contextKey string const requestIDContextKey contextKey = "request_id" +const mcpToolContextKey contextKey = "mcp_tool" func Chain(h http.Handler, middlewares ...func(http.Handler) http.Handler) http.Handler { for i := len(middlewares) - 1; i >= 0; i-- { @@ -58,18 +63,26 @@ func Recover(log *slog.Logger) func(http.Handler) http.Handler { func AccessLog(log *slog.Logger) func(http.Handler) http.Handler { return func(next http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if tool := mcpToolFromRequest(r); tool != "" { + r = r.WithContext(context.WithValue(r.Context(), mcpToolContextKey, tool)) + } + recorder := &statusRecorder{ResponseWriter: w, 
status: http.StatusOK} started := time.Now() next.ServeHTTP(recorder, r) - log.Info("http request", + attrs := []any{ slog.String("request_id", RequestIDFromContext(r.Context())), slog.String("method", r.Method), slog.String("path", r.URL.Path), slog.Int("status", recorder.status), slog.Duration("duration", time.Since(started)), slog.String("remote_addr", requestip.FromRequest(r)), - ) + } + if tool, _ := r.Context().Value(mcpToolContextKey).(string); strings.TrimSpace(tool) != "" { + attrs = append(attrs, slog.String("tool", tool)) + } + log.Info("http request", attrs...) }) } } @@ -101,3 +114,52 @@ func (s *statusRecorder) WriteHeader(statusCode int) { s.status = statusCode s.ResponseWriter.WriteHeader(statusCode) } + +func mcpToolFromRequest(r *http.Request) string { + if r == nil || r.Method != http.MethodPost || !strings.HasPrefix(r.URL.Path, "/mcp") || r.Body == nil { + return "" + } + + raw, err := io.ReadAll(r.Body) + if err != nil { + return "" + } + r.Body = io.NopCloser(bytes.NewReader(raw)) + if len(raw) == 0 { + return "" + } + + // Support both single and batch JSON-RPC payloads. 
+ if strings.HasPrefix(strings.TrimSpace(string(raw)), "[") { + var batch []rpcEnvelope + if err := json.Unmarshal(raw, &batch); err != nil { + return "" + } + for _, msg := range batch { + if tool := msg.toolName(); tool != "" { + return tool + } + } + return "" + } + + var msg rpcEnvelope + if err := json.Unmarshal(raw, &msg); err != nil { + return "" + } + return msg.toolName() +} + +type rpcEnvelope struct { + Method string `json:"method"` + Params struct { + Name string `json:"name"` + } `json:"params"` +} + +func (m rpcEnvelope) toolName() string { + if m.Method != "tools/call" { + return "" + } + return strings.TrimSpace(m.Params.Name) +} diff --git a/internal/observability/http_test.go b/internal/observability/http_test.go index fe69358..cf97b4f 100644 --- a/internal/observability/http_test.go +++ b/internal/observability/http_test.go @@ -2,6 +2,7 @@ package observability import ( "bytes" + "encoding/json" "io" "log/slog" "net/http" @@ -80,3 +81,36 @@ func TestAccessLogUsesForwardedClientIP(t *testing.T) { t.Fatalf("log output = %q, want remote_addr=203.0.113.7", buf.String()) } } + +func TestAccessLogIncludesMCPToolName(t *testing.T) { + var buf bytes.Buffer + logger := slog.New(slog.NewTextHandler(&buf, nil)) + handler := AccessLog(logger)(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusNoContent) + })) + + payload := map[string]any{ + "jsonrpc": "2.0", + "id": "1", + "method": "tools/call", + "params": map[string]any{ + "name": "list_projects", + "arguments": map[string]any{}, + }, + } + body, err := json.Marshal(payload) + if err != nil { + t.Fatalf("json.Marshal() error = %v", err) + } + + req := httptest.NewRequest(http.MethodPost, "/mcp", bytes.NewReader(body)) + rec := httptest.NewRecorder() + handler.ServeHTTP(rec, req) + + if rec.Code != http.StatusNoContent { + t.Fatalf("status = %d, want %d", rec.Code, http.StatusNoContent) + } + if !strings.Contains(buf.String(), "tool=list_projects") { + t.Fatalf("log 
output = %q, want tool=list_projects", buf.String()) + } +} diff --git a/internal/requestip/requestip.go b/internal/requestip/requestip.go index ea5ffee..e639cf1 100644 --- a/internal/requestip/requestip.go +++ b/internal/requestip/requestip.go @@ -11,10 +11,9 @@ import ( // // Header precedence: // 1) X-Real-IP -// 2) X-Forwarded-Host -// 3) X-Forwarded-For (first value) -// 4) Forwarded (for=...) -// 5) RemoteAddr (host part) +// 2) X-Forwarded-For (first value) +// 3) Forwarded (for=...) +// 4) RemoteAddr (host part) func FromRequest(r *http.Request) string { if r == nil { return "" @@ -23,9 +22,6 @@ func FromRequest(r *http.Request) string { if v := firstAddressToken(r.Header.Get("X-Real-IP")); v != "" { return stripPort(v) } - if v := firstAddressToken(r.Header.Get("X-Forwarded-Host")); v != "" { - return stripPort(v) - } if v := firstAddressToken(r.Header.Get("X-Forwarded-For")); v != "" { return stripPort(v) } diff --git a/internal/requestip/requestip_test.go b/internal/requestip/requestip_test.go index 09a15a7..0756ed2 100644 --- a/internal/requestip/requestip_test.go +++ b/internal/requestip/requestip_test.go @@ -9,7 +9,7 @@ import ( func TestFromRequestPrefersXRealIP(t *testing.T) { req := httptest.NewRequest(http.MethodGet, "/", nil) req.RemoteAddr = "10.0.0.10:5555" - req.Header.Set("X-Forwarded-Host", "proxy.example.com") + req.Header.Set("X-Forwarded-For", "198.51.100.1") req.Header.Set("X-Real-IP", "203.0.113.10") if got := FromRequest(req); got != "203.0.113.10" { @@ -17,16 +17,6 @@ func TestFromRequestPrefersXRealIP(t *testing.T) { } } -func TestFromRequestUsesXForwardedHostWhenRealIPMissing(t *testing.T) { - req := httptest.NewRequest(http.MethodGet, "/", nil) - req.RemoteAddr = "10.0.0.10:5555" - req.Header.Set("X-Forwarded-Host", "203.0.113.22") - - if got := FromRequest(req); got != "203.0.113.22" { - t.Fatalf("FromRequest() = %q, want %q", got, "203.0.113.22") - } -} - func TestFromRequestUsesXForwardedForFirstValue(t *testing.T) { req := 
httptest.NewRequest(http.MethodGet, "/", nil) req.RemoteAddr = "10.0.0.10:5555" From 3dfed9c986a4354be3da35892036bdff6b38e8d3 Mon Sep 17 00:00:00 2001 From: Hein Date: Tue, 21 Apr 2026 23:04:46 +0200 Subject: [PATCH 10/15] fix(observability): include MCP session ID in access logs * Add function to extract MCP session ID from request headers and query parameters * Update access log to include MCP session ID fix(cli): simplify project lookup logic * Refactor project retrieval to prefer GUID lookup when input is a valid UUID * Introduce separate functions for fetching projects by GUID and name --- cmd/amcs-cli/cmd/root.go | 8 ++--- cmd/amcs-cli/cmd/sse.go | 9 +++--- internal/observability/http.go | 19 +++++++++++- internal/observability/http_test.go | 42 ++++++++++++++++++++++++++ internal/store/projects.go | 47 +++++++++++++++++++++-------- 5 files changed, 101 insertions(+), 24 deletions(-) diff --git a/cmd/amcs-cli/cmd/root.go b/cmd/amcs-cli/cmd/root.go index b658b6f..d0c9336 100644 --- a/cmd/amcs-cli/cmd/root.go +++ b/cmd/amcs-cli/cmd/root.go @@ -6,7 +6,6 @@ import ( "net/http" "os" "strings" - "time" "github.com/modelcontextprotocol/go-sdk/mcp" "github.com/spf13/cobra" @@ -132,11 +131,10 @@ func connectRemote(ctx context.Context) (*mcp.ClientSession, error) { verboseLogf("connecting to %s", endpointURL()) client := mcp.NewClient(&mcp.Implementation{Name: "amcs-cli", Version: "0.0.1"}, nil) transport := &mcp.StreamableClientTransport{ - Endpoint: endpointURL(), - HTTPClient: newHTTPClient(), + Endpoint: endpointURL(), + HTTPClient: newHTTPClient(), + DisableStandaloneSSE: true, } - ctx, cancel := context.WithTimeout(ctx, 30*time.Second) - defer cancel() session, err := client.Connect(ctx, transport, nil) if err != nil { return nil, fmt.Errorf("connect to AMCS server: %w", err) diff --git a/cmd/amcs-cli/cmd/sse.go b/cmd/amcs-cli/cmd/sse.go index a5a7b47..2d08341 100644 --- a/cmd/amcs-cli/cmd/sse.go +++ b/cmd/amcs-cli/cmd/sse.go @@ -4,7 +4,6 @@ import ( "context" 
"fmt" "strings" - "time" "github.com/modelcontextprotocol/go-sdk/mcp" "github.com/spf13/cobra" @@ -26,11 +25,8 @@ var sseCmd = &cobra.Command{ HTTPClient: newHTTPClient(), } - connectCtx, cancel := context.WithTimeout(ctx, 30*time.Second) - defer cancel() - verboseLogf("connecting to SSE endpoint %s", sseEndpointURL()) - remote, err := client.Connect(connectCtx, transport, nil) + remote, err := client.Connect(ctx, transport, nil) if err != nil { return fmt.Errorf("connect to AMCS SSE endpoint: %w", err) } @@ -79,6 +75,9 @@ var sseCmd = &cobra.Command{ func sseEndpointURL() string { base := strings.TrimRight(strings.TrimSpace(cfg.Server), "/") + if strings.HasSuffix(base, "/mcp") { + base = strings.TrimSuffix(base, "/mcp") + } if strings.HasSuffix(base, "/sse") { return base } diff --git a/internal/observability/http.go b/internal/observability/http.go index e442e9a..064392f 100644 --- a/internal/observability/http.go +++ b/internal/observability/http.go @@ -78,9 +78,10 @@ func AccessLog(log *slog.Logger) func(http.Handler) http.Handler { slog.Int("status", recorder.status), slog.Duration("duration", time.Since(started)), slog.String("remote_addr", requestip.FromRequest(r)), + slog.String("mcp_session_id", mcpSessionIDFromRequest(r)), } if tool, _ := r.Context().Value(mcpToolContextKey).(string); strings.TrimSpace(tool) != "" { - attrs = append(attrs, slog.String("tool", tool)) + attrs = append(attrs, slog.String("tool", tool), slog.String("tool_call", tool)) } log.Info("http request", attrs...) }) @@ -150,6 +151,22 @@ func mcpToolFromRequest(r *http.Request) string { return msg.toolName() } +func mcpSessionIDFromRequest(r *http.Request) string { + if r == nil { + return "" + } + if v := strings.TrimSpace(r.Header.Get("MCP-Session-Id")); v != "" { + return v + } + // Some clients/proxies may propagate the session in query params. 
+ for _, key := range []string{"session_id", "sessionId", "mcp_session_id"} { + if v := strings.TrimSpace(r.URL.Query().Get(key)); v != "" { + return v + } + } + return "" +} + type rpcEnvelope struct { Method string `json:"method"` Params struct { diff --git a/internal/observability/http_test.go b/internal/observability/http_test.go index cf97b4f..6e4d0ea 100644 --- a/internal/observability/http_test.go +++ b/internal/observability/http_test.go @@ -113,4 +113,46 @@ func TestAccessLogIncludesMCPToolName(t *testing.T) { if !strings.Contains(buf.String(), "tool=list_projects") { t.Fatalf("log output = %q, want tool=list_projects", buf.String()) } + if !strings.Contains(buf.String(), "tool_call=list_projects") { + t.Fatalf("log output = %q, want tool_call=list_projects", buf.String()) + } +} + +func TestAccessLogIncludesMCPSessionIDHeader(t *testing.T) { + var buf bytes.Buffer + logger := slog.New(slog.NewTextHandler(&buf, nil)) + handler := AccessLog(logger)(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusNoContent) + })) + + req := httptest.NewRequest(http.MethodGet, "/sse", nil) + req.Header.Set("MCP-Session-Id", "sess-123") + rec := httptest.NewRecorder() + handler.ServeHTTP(rec, req) + + if rec.Code != http.StatusNoContent { + t.Fatalf("status = %d, want %d", rec.Code, http.StatusNoContent) + } + if !strings.Contains(buf.String(), "mcp_session_id=sess-123") { + t.Fatalf("log output = %q, want mcp_session_id=sess-123", buf.String()) + } +} + +func TestAccessLogIncludesMCPSessionIDQueryParam(t *testing.T) { + var buf bytes.Buffer + logger := slog.New(slog.NewTextHandler(&buf, nil)) + handler := AccessLog(logger)(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusNoContent) + })) + + req := httptest.NewRequest(http.MethodGet, "/sse?session_id=sess-q-1", nil) + rec := httptest.NewRecorder() + handler.ServeHTTP(rec, req) + + if rec.Code != http.StatusNoContent { + t.Fatalf("status = %d, 
want %d", rec.Code, http.StatusNoContent) + } + if !strings.Contains(buf.String(), "mcp_session_id=sess-q-1") { + t.Fatalf("log output = %q, want mcp_session_id=sess-q-1", buf.String()) + } } diff --git a/internal/store/projects.go b/internal/store/projects.go index d3a707f..aa1834a 100644 --- a/internal/store/projects.go +++ b/internal/store/projects.go @@ -26,21 +26,42 @@ func (db *DB) CreateProject(ctx context.Context, name, description string) (thou } func (db *DB) GetProject(ctx context.Context, nameOrID string) (thoughttypes.Project, error) { - var row pgx.Row - if parsedID, err := uuid.Parse(strings.TrimSpace(nameOrID)); err == nil { - row = db.pool.QueryRow(ctx, ` - select guid, name, description, created_at, last_active_at - from projects - where guid = $1 - `, parsedID) - } else { - row = db.pool.QueryRow(ctx, ` - select guid, name, description, created_at, last_active_at - from projects - where name = $1 - `, strings.TrimSpace(nameOrID)) + lookup := strings.TrimSpace(nameOrID) + + // Prefer guid lookup when input parses as UUID, but fall back to name lookup + // so UUID-shaped project names can still be resolved by name. 
+ if parsedID, err := uuid.Parse(lookup); err == nil { + project, queryErr := db.getProjectByGUID(ctx, parsedID) + if queryErr == nil { + return project, nil + } + if queryErr != pgx.ErrNoRows { + return thoughttypes.Project{}, queryErr + } } + return db.getProjectByName(ctx, lookup) +} + +func (db *DB) getProjectByGUID(ctx context.Context, id uuid.UUID) (thoughttypes.Project, error) { + row := db.pool.QueryRow(ctx, ` + select guid, name, description, created_at, last_active_at + from projects + where guid = $1 + `, id) + return scanProject(row) +} + +func (db *DB) getProjectByName(ctx context.Context, name string) (thoughttypes.Project, error) { + row := db.pool.QueryRow(ctx, ` + select guid, name, description, created_at, last_active_at + from projects + where name = $1 + `, name) + return scanProject(row) +} + +func scanProject(row pgx.Row) (thoughttypes.Project, error) { var project thoughttypes.Project if err := row.Scan(&project.ID, &project.Name, &project.Description, &project.CreatedAt, &project.LastActiveAt); err != nil { if err == pgx.ErrNoRows { From 27cd494f6dbe70299b0d1e7df073fa8a31e26888 Mon Sep 17 00:00:00 2001 From: sgcommand Date: Wed, 22 Apr 2026 13:40:44 +0200 Subject: [PATCH 11/15] feat(schema): add structured learnings DBML model --- migrations/020_generated_schema.sql | 415 ++++++++++++++++++++++++++++ schema/meta.dbml | 43 +++ 2 files changed, 458 insertions(+) diff --git a/migrations/020_generated_schema.sql b/migrations/020_generated_schema.sql index 34a1e4f..981611d 100644 --- a/migrations/020_generated_schema.sql +++ b/migrations/020_generated_schema.sql @@ -275,6 +275,30 @@ CREATE TABLE IF NOT EXISTS public.tool_annotations ( updated_at timestamptz NOT NULL DEFAULT now() ); +CREATE TABLE IF NOT EXISTS public.learnings ( + action_required boolean NOT NULL DEFAULT false, + area text NOT NULL DEFAULT 'other', + category text NOT NULL DEFAULT 'insight', + confidence text NOT NULL DEFAULT 'hypothesis', + created_at timestamptz NOT NULL 
DEFAULT now(), + details text NOT NULL DEFAULT '', + duplicate_of_learning_id uuid, + id uuid NOT NULL DEFAULT gen_random_uuid(), + priority text NOT NULL DEFAULT 'medium', + project_id uuid, + related_skill_id uuid, + related_thought_id uuid, + reviewed_at timestamptz, + reviewed_by text, + source_ref text, + source_type text, + status text NOT NULL DEFAULT 'pending', + summary text NOT NULL, + supersedes_learning_id uuid, + tags text, + updated_at timestamptz NOT NULL DEFAULT now() +); + CREATE TABLE IF NOT EXISTS public.agent_skills ( content text NOT NULL, created_at timestamptz NOT NULL DEFAULT now(), @@ -2597,6 +2621,279 @@ BEGIN END; $$; +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND column_name = 'action_required' + ) THEN + ALTER TABLE public.learnings ADD COLUMN action_required boolean NOT NULL DEFAULT false; + END IF; +END; +$$; + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND column_name = 'area' + ) THEN + ALTER TABLE public.learnings ADD COLUMN area text NOT NULL DEFAULT 'other'; + END IF; +END; +$$; + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND column_name = 'category' + ) THEN + ALTER TABLE public.learnings ADD COLUMN category text NOT NULL DEFAULT 'insight'; + END IF; +END; +$$; + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND column_name = 'confidence' + ) THEN + ALTER TABLE public.learnings ADD COLUMN confidence text NOT NULL DEFAULT 'hypothesis'; + END IF; +END; +$$; + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND column_name = 'created_at' + ) THEN + ALTER TABLE 
public.learnings ADD COLUMN created_at timestamptz NOT NULL DEFAULT now(); + END IF; +END; +$$; + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND column_name = 'details' + ) THEN + ALTER TABLE public.learnings ADD COLUMN details text NOT NULL DEFAULT ''; + END IF; +END; +$$; + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND column_name = 'duplicate_of_learning_id' + ) THEN + ALTER TABLE public.learnings ADD COLUMN duplicate_of_learning_id uuid; + END IF; +END; +$$; + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND column_name = 'id' + ) THEN + ALTER TABLE public.learnings ADD COLUMN id uuid NOT NULL DEFAULT gen_random_uuid(); + END IF; +END; +$$; + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND column_name = 'priority' + ) THEN + ALTER TABLE public.learnings ADD COLUMN priority text NOT NULL DEFAULT 'medium'; + END IF; +END; +$$; + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND column_name = 'project_id' + ) THEN + ALTER TABLE public.learnings ADD COLUMN project_id uuid; + END IF; +END; +$$; + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND column_name = 'related_skill_id' + ) THEN + ALTER TABLE public.learnings ADD COLUMN related_skill_id uuid; + END IF; +END; +$$; + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND column_name = 'related_thought_id' + ) THEN + ALTER TABLE public.learnings ADD COLUMN 
related_thought_id uuid; + END IF; +END; +$$; + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND column_name = 'reviewed_at' + ) THEN + ALTER TABLE public.learnings ADD COLUMN reviewed_at timestamptz; + END IF; +END; +$$; + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND column_name = 'reviewed_by' + ) THEN + ALTER TABLE public.learnings ADD COLUMN reviewed_by text; + END IF; +END; +$$; + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND column_name = 'source_ref' + ) THEN + ALTER TABLE public.learnings ADD COLUMN source_ref text; + END IF; +END; +$$; + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND column_name = 'source_type' + ) THEN + ALTER TABLE public.learnings ADD COLUMN source_type text; + END IF; +END; +$$; + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND column_name = 'status' + ) THEN + ALTER TABLE public.learnings ADD COLUMN status text NOT NULL DEFAULT 'pending'; + END IF; +END; +$$; + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND column_name = 'summary' + ) THEN + ALTER TABLE public.learnings ADD COLUMN summary text NOT NULL; + END IF; +END; +$$; + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND column_name = 'supersedes_learning_id' + ) THEN + ALTER TABLE public.learnings ADD COLUMN supersedes_learning_id uuid; + END IF; +END; +$$; + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM 
information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND column_name = 'tags' + ) THEN + ALTER TABLE public.learnings ADD COLUMN tags text; + END IF; +END; +$$; + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND column_name = 'updated_at' + ) THEN + ALTER TABLE public.learnings ADD COLUMN updated_at timestamptz NOT NULL DEFAULT now(); + END IF; +END; +$$; + DO $$ BEGIN IF NOT EXISTS ( @@ -3403,6 +3700,34 @@ BEGIN END; $$; +DO $$ +DECLARE + auto_pk_name text; +BEGIN + -- Drop auto-generated primary key if it exists + SELECT constraint_name INTO auto_pk_name + FROM information_schema.table_constraints + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND constraint_type = 'PRIMARY KEY' + AND constraint_name IN ('learnings_pkey', 'public_learnings_pkey'); + + IF auto_pk_name IS NOT NULL THEN + EXECUTE 'ALTER TABLE public.learnings DROP CONSTRAINT ' || quote_ident(auto_pk_name); + END IF; + + -- Add named primary key if it doesn't exist + IF NOT EXISTS ( + SELECT 1 FROM information_schema.table_constraints + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND constraint_name = 'pk_public_learnings' + ) THEN + ALTER TABLE public.learnings ADD CONSTRAINT pk_public_learnings PRIMARY KEY (id); + END IF; +END; +$$; + DO $$ DECLARE auto_pk_name text; @@ -3475,6 +3800,15 @@ CREATE INDEX IF NOT EXISTS idx_contact_interactions_contact_id_occurred_at CREATE INDEX IF NOT EXISTS idx_maintenance_logs_task_id_completed_at ON public.maintenance_logs USING btree (task_id, completed_at); +CREATE INDEX IF NOT EXISTS idx_learnings_details + ON public.learnings USING gin (details gin_trgm_ops); + +CREATE INDEX IF NOT EXISTS idx_learnings_summary + ON public.learnings USING gin (summary gin_trgm_ops); + +CREATE INDEX IF NOT EXISTS idx_learnings_tags + ON public.learnings USING gin (tags gin_trgm_ops); + CREATE INDEX IF NOT 
EXISTS idx_project_skills_project_id_skill_id ON public.project_skills USING btree (project_id, skill_id); @@ -3810,6 +4144,86 @@ BEGIN END IF; END; $$;DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.table_constraints + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND constraint_name = 'fk_learnings_duplicate_of_learning_id' + ) THEN + ALTER TABLE public.learnings + ADD CONSTRAINT fk_learnings_duplicate_of_learning_id + FOREIGN KEY (duplicate_of_learning_id) + REFERENCES public.learnings (id) + ON DELETE NO ACTION + ON UPDATE NO ACTION; + END IF; +END; +$$;DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.table_constraints + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND constraint_name = 'fk_learnings_project_id' + ) THEN + ALTER TABLE public.learnings + ADD CONSTRAINT fk_learnings_project_id + FOREIGN KEY (project_id) + REFERENCES public.projects (guid) + ON DELETE NO ACTION + ON UPDATE NO ACTION; + END IF; +END; +$$;DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.table_constraints + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND constraint_name = 'fk_learnings_related_skill_id' + ) THEN + ALTER TABLE public.learnings + ADD CONSTRAINT fk_learnings_related_skill_id + FOREIGN KEY (related_skill_id) + REFERENCES public.agent_skills (id) + ON DELETE NO ACTION + ON UPDATE NO ACTION; + END IF; +END; +$$;DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.table_constraints + WHERE table_schema = 'public' + AND table_name = 'learnings' + AND constraint_name = 'fk_learnings_related_thought_id' + ) THEN + ALTER TABLE public.learnings + ADD CONSTRAINT fk_learnings_related_thought_id + FOREIGN KEY (related_thought_id) + REFERENCES public.thoughts (guid) + ON DELETE NO ACTION + ON UPDATE NO ACTION; + END IF; +END; +$$;DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.table_constraints + WHERE table_schema = 'public' + AND table_name = 
'learnings' + AND constraint_name = 'fk_learnings_supersedes_learning_id' + ) THEN + ALTER TABLE public.learnings + ADD CONSTRAINT fk_learnings_supersedes_learning_id + FOREIGN KEY (supersedes_learning_id) + REFERENCES public.learnings (id) + ON DELETE NO ACTION + ON UPDATE NO ACTION; + END IF; +END; +$$;DO $$ BEGIN IF NOT EXISTS ( SELECT 1 FROM information_schema.table_constraints @@ -3992,5 +4406,6 @@ $$; + diff --git a/schema/meta.dbml b/schema/meta.dbml index 792ccde..f83200d 100644 --- a/schema/meta.dbml +++ b/schema/meta.dbml @@ -30,3 +30,46 @@ Table tool_annotations { // Cross-file refs (for relspecgo merge) Ref: chat_histories.project_id > projects.guid [delete: set null] + +Table learnings { + id uuid [pk, default: `gen_random_uuid()`] + summary text [not null] + details text [not null, default: ''] + category text [not null, default: 'insight'] + area text [not null, default: 'other'] + status text [not null, default: 'pending'] + priority text [not null, default: 'medium'] + confidence text [not null, default: 'hypothesis'] + action_required boolean [not null, default: false] + source_type text + source_ref text + project_id uuid [ref: > projects.guid] + related_thought_id uuid [ref: > thoughts.guid] + related_skill_id uuid [ref: > agent_skills.id] + reviewed_by text + reviewed_at timestamptz + duplicate_of_learning_id uuid [ref: > learnings.id] + supersedes_learning_id uuid [ref: > learnings.id] + tags "text[]" [not null, default: `'{}'`] + created_at timestamptz [not null, default: `now()`] + updated_at timestamptz [not null, default: `now()`] + + indexes { + project_id + category + area + status + priority + reviewed_at + tags [type: gin] + summary [type: gin] + details [type: gin] + } +} + +// Cross-file refs (for relspecgo merge) +Ref: learnings.project_id > projects.guid [delete: set null] +Ref: learnings.related_thought_id > thoughts.guid [delete: set null] +Ref: learnings.related_skill_id > agent_skills.id [delete: set null] +Ref: 
learnings.duplicate_of_learning_id > learnings.id [delete: set null] +Ref: learnings.supersedes_learning_id > learnings.id [delete: set null] From 3e832eea98d9ac1bf5ba75e43abd76d57cc1013f Mon Sep 17 00:00:00 2001 From: sgcommand Date: Wed, 22 Apr 2026 14:00:12 +0200 Subject: [PATCH 12/15] feat(learnings): add store and MCP tool layer --- internal/app/app.go | 1 + internal/mcpserver/server.go | 29 ++++ internal/mcpserver/server_test.go | 3 + internal/store/learnings.go | 215 ++++++++++++++++++++++++++++++ internal/tools/learnings.go | 174 ++++++++++++++++++++++++ internal/types/learning.go | 68 ++++++++++ 6 files changed, 490 insertions(+) create mode 100644 internal/store/learnings.go create mode 100644 internal/tools/learnings.go create mode 100644 internal/types/learning.go diff --git a/internal/app/app.go b/internal/app/app.go index 3359c30..305727f 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -203,6 +203,7 @@ func routes(logger *slog.Logger, cfg *config.Config, info buildinfo.Info, db *st Archive: tools.NewArchiveTool(db), Projects: tools.NewProjectsTool(db, activeProjects), Version: tools.NewVersionTool(cfg.MCP.ServerName, info), + Learnings: tools.NewLearningsTool(db, activeProjects, cfg.Search), Context: tools.NewContextTool(db, embeddings, cfg.Search, activeProjects), Recall: tools.NewRecallTool(db, embeddings, cfg.Search, activeProjects), Summarize: tools.NewSummarizeTool(db, embeddings, metadata, cfg.Search, activeProjects), diff --git a/internal/mcpserver/server.go b/internal/mcpserver/server.go index c9e55b5..5f53f5f 100644 --- a/internal/mcpserver/server.go +++ b/internal/mcpserver/server.go @@ -40,6 +40,7 @@ type ToolSet struct { Skills *tools.SkillsTool ChatHistory *tools.ChatHistoryTool Describe *tools.DescribeTool + Learnings *tools.LearningsTool } // Handlers groups the HTTP handlers produced for an MCP server instance. 
@@ -83,6 +84,7 @@ func NewHandlers(cfg config.MCPConfig, logger *slog.Logger, toolSet ToolSet, onS registerSystemTools, registerThoughtTools, registerProjectTools, + registerLearningTools, registerFileTools, registerMaintenanceTools, registerSkillTools, @@ -249,6 +251,28 @@ func registerProjectTools(server *mcp.Server, logger *slog.Logger, toolSet ToolS return nil } +func registerLearningTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error { + if err := addTool(server, logger, &mcp.Tool{ + Name: "add_learning", + Description: "Create a curated learning record distinct from raw thoughts.", + }, toolSet.Learnings.Add); err != nil { + return err + } + if err := addTool(server, logger, &mcp.Tool{ + Name: "get_learning", + Description: "Retrieve a structured learning by id.", + }, toolSet.Learnings.Get); err != nil { + return err + } + if err := addTool(server, logger, &mcp.Tool{ + Name: "list_learnings", + Description: "List structured learnings with optional project, category, area, status, priority, tag, and text filters.", + }, toolSet.Learnings.List); err != nil { + return err + } + return nil +} + +func registerFileTools(server *mcp.Server, logger *slog.Logger, toolSet ToolSet) error { + server.AddResourceTemplate(&mcp.ResourceTemplate{ + Name: "stored_file", @@ -477,6 +501,11 @@ func BuildToolCatalog() []tools.ToolEntry { {Name: "get_active_project", Description: "Return the active project for the current MCP session. If your client does not preserve MCP sessions, pass project explicitly to project-scoped tools instead of relying on this.", Category: "projects"}, {Name: "get_project_context", Description: "Get recent and semantic context for a project. Uses the explicit project when provided, otherwise the active MCP session project.
Falls back to full-text search when no embeddings exist.", Category: "projects"}, + // learnings + {Name: "add_learning", Description: "Create a curated learning record distinct from raw thoughts.", Category: "projects"}, + {Name: "get_learning", Description: "Retrieve a structured learning by id.", Category: "projects"}, + {Name: "list_learnings", Description: "List structured learnings with optional project, category, area, status, priority, tag, and text filters.", Category: "projects"}, + // files {Name: "upload_file", Description: "Stage a file and get an amcs://files/{id} resource URI. Use content_path (absolute server-side path, no size limit) for large or binary files, or content_base64 (≤10 MB) for small files. Pass thought_id/project to link immediately, or omit and pass the URI to save_file later.", Category: "files"}, {Name: "save_file", Description: "Store a file and optionally link it to a thought. Use content_base64 (≤10 MB) for small files, or content_uri (amcs://files/{id} from a prior upload_file) for previously staged files. For files larger than 10 MB, use upload_file with content_path first. 
If the goal is to retain the artifact, store the file directly instead of reading or summarising it first.", Category: "files"}, diff --git a/internal/mcpserver/server_test.go b/internal/mcpserver/server_test.go index 4fcee5c..7c71dc1 100644 --- a/internal/mcpserver/server_test.go +++ b/internal/mcpserver/server_test.go @@ -29,6 +29,7 @@ func TestNewListsAllRegisteredTools(t *testing.T) { want := []string{ "add_guardrail", + "add_learning", "add_maintenance_task", "add_project_guardrail", "add_project_skill", @@ -43,6 +44,7 @@ func TestNewListsAllRegisteredTools(t *testing.T) { "describe_tools", "get_active_project", "get_chat_history", + "get_learning", "get_project_context", "get_thought", "get_upcoming_maintenance", @@ -51,6 +53,7 @@ func TestNewListsAllRegisteredTools(t *testing.T) { "list_chat_histories", "list_files", "list_guardrails", + "list_learnings", "list_project_guardrails", "list_project_skills", "list_projects", diff --git a/internal/store/learnings.go b/internal/store/learnings.go new file mode 100644 index 0000000..9089c93 --- /dev/null +++ b/internal/store/learnings.go @@ -0,0 +1,215 @@ +package store + +import ( + "context" + "fmt" + "strings" + + "github.com/google/uuid" + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgtype" + + thoughttypes "git.warky.dev/wdevs/amcs/internal/types" +) + +func (db *DB) CreateLearning(ctx context.Context, learning thoughttypes.Learning) (thoughttypes.Learning, error) { + row := db.pool.QueryRow(ctx, ` + insert into learnings ( + summary, details, category, area, status, priority, confidence, + action_required, source_type, source_ref, project_id, related_thought_id, + related_skill_id, reviewed_by, reviewed_at, duplicate_of_learning_id, + supersedes_learning_id, tags + ) values ( + $1, $2, $3, $4, $5, $6, $7, + $8, $9, $10, $11, $12, + $13, $14, $15, $16, + $17, $18 + ) + returning id, created_at, updated_at + `, + strings.TrimSpace(learning.Summary), + strings.TrimSpace(learning.Details), + 
strings.TrimSpace(learning.Category), + strings.TrimSpace(learning.Area), + string(learning.Status), + string(learning.Priority), + string(learning.Confidence), + learning.ActionRequired, + nullableText(learning.SourceType), + nullableText(learning.SourceRef), + learning.ProjectID, + learning.RelatedThoughtID, + learning.RelatedSkillID, + nullableTextPtr(learning.ReviewedBy), + learning.ReviewedAt, + learning.DuplicateOfLearningID, + learning.SupersedesLearningID, + learning.Tags, + ) + + created := learning + if err := row.Scan(&created.ID, &created.CreatedAt, &created.UpdatedAt); err != nil { + return thoughttypes.Learning{}, fmt.Errorf("create learning: %w", err) + } + return created, nil +} + +func (db *DB) GetLearning(ctx context.Context, id uuid.UUID) (thoughttypes.Learning, error) { + row := db.pool.QueryRow(ctx, ` + select id, summary, details, category, area, status, priority, confidence, + action_required, source_type, source_ref, project_id, related_thought_id, + related_skill_id, reviewed_by, reviewed_at, duplicate_of_learning_id, + supersedes_learning_id, tags, created_at, updated_at + from learnings + where id = $1 + `, id) + + learning, err := scanLearning(row) + if err != nil { + if err == pgx.ErrNoRows { + return thoughttypes.Learning{}, fmt.Errorf("learning not found: %s", id) + } + return thoughttypes.Learning{}, fmt.Errorf("get learning: %w", err) + } + return learning, nil +} + +func (db *DB) ListLearnings(ctx context.Context, filter thoughttypes.LearningFilter) ([]thoughttypes.Learning, error) { + args := make([]any, 0, 8) + conditions := make([]string, 0, 8) + + if filter.ProjectID != nil { + args = append(args, *filter.ProjectID) + conditions = append(conditions, fmt.Sprintf("project_id = $%d", len(args))) + } + if value := strings.TrimSpace(filter.Category); value != "" { + args = append(args, value) + conditions = append(conditions, fmt.Sprintf("category = $%d", len(args))) + } + if value := strings.TrimSpace(filter.Area); value != "" { + 
args = append(args, value) + conditions = append(conditions, fmt.Sprintf("area = $%d", len(args))) + } + if value := strings.TrimSpace(filter.Status); value != "" { + args = append(args, value) + conditions = append(conditions, fmt.Sprintf("status = $%d", len(args))) + } + if value := strings.TrimSpace(filter.Priority); value != "" { + args = append(args, value) + conditions = append(conditions, fmt.Sprintf("priority = $%d", len(args))) + } + if value := strings.TrimSpace(filter.Tag); value != "" { + args = append(args, value) + conditions = append(conditions, fmt.Sprintf("$%d = any(tags)", len(args))) + } + if value := strings.TrimSpace(filter.Query); value != "" { + args = append(args, value) + conditions = append(conditions, fmt.Sprintf("to_tsvector('simple', summary || ' ' || coalesce(details, '')) @@ websearch_to_tsquery('simple', $%d)", len(args))) + } + + query := ` + select id, summary, details, category, area, status, priority, confidence, + action_required, source_type, source_ref, project_id, related_thought_id, + related_skill_id, reviewed_by, reviewed_at, duplicate_of_learning_id, + supersedes_learning_id, tags, created_at, updated_at + from learnings + ` + if len(conditions) > 0 { + query += " where " + strings.Join(conditions, " and ") + } + query += " order by updated_at desc" + if filter.Limit > 0 { + args = append(args, filter.Limit) + query += fmt.Sprintf(" limit $%d", len(args)) + } + + rows, err := db.pool.Query(ctx, query, args...) 
+ if err != nil { + return nil, fmt.Errorf("list learnings: %w", err) + } + defer rows.Close() + + items := make([]thoughttypes.Learning, 0) + for rows.Next() { + item, err := scanLearning(rows) + if err != nil { + return nil, fmt.Errorf("scan learning: %w", err) + } + items = append(items, item) + } + if err := rows.Err(); err != nil { + return nil, fmt.Errorf("iterate learnings: %w", err) + } + return items, nil +} + +type learningScanner interface { + Scan(dest ...any) error +} + +func scanLearning(row learningScanner) (thoughttypes.Learning, error) { + var learning thoughttypes.Learning + var sourceType pgtype.Text + var sourceRef pgtype.Text + var reviewedBy pgtype.Text + var tags []string + + err := row.Scan( + &learning.ID, + &learning.Summary, + &learning.Details, + &learning.Category, + &learning.Area, + &learning.Status, + &learning.Priority, + &learning.Confidence, + &learning.ActionRequired, + &sourceType, + &sourceRef, + &learning.ProjectID, + &learning.RelatedThoughtID, + &learning.RelatedSkillID, + &reviewedBy, + &learning.ReviewedAt, + &learning.DuplicateOfLearningID, + &learning.SupersedesLearningID, + &tags, + &learning.CreatedAt, + &learning.UpdatedAt, + ) + if err != nil { + return thoughttypes.Learning{}, err + } + + learning.SourceType = sourceType.String + learning.SourceRef = sourceRef.String + if reviewedBy.Valid { + value := reviewedBy.String + learning.ReviewedBy = &value + } + if tags == nil { + learning.Tags = []string{} + } else { + learning.Tags = tags + } + return learning, nil +} + +func nullableText(value string) *string { + trimmed := strings.TrimSpace(value) + if trimmed == "" { + return nil + } + return &trimmed +} + +func nullableTextPtr(value *string) *string { + if value == nil { + return nil + } + trimmed := strings.TrimSpace(*value) + if trimmed == "" { + return nil + } + return &trimmed +} diff --git a/internal/tools/learnings.go b/internal/tools/learnings.go new file mode 100644 index 0000000..b43e3ac --- /dev/null +++ 
b/internal/tools/learnings.go @@ -0,0 +1,174 @@ +package tools + +import ( + "context" + "strings" + + "github.com/google/uuid" + "github.com/modelcontextprotocol/go-sdk/mcp" + + "git.warky.dev/wdevs/amcs/internal/config" + "git.warky.dev/wdevs/amcs/internal/session" + "git.warky.dev/wdevs/amcs/internal/store" + thoughttypes "git.warky.dev/wdevs/amcs/internal/types" +) + +type LearningsTool struct { + store *store.DB + sessions *session.ActiveProjects + cfg config.SearchConfig +} + +type AddLearningInput struct { + Summary string `json:"summary" jsonschema:"short curated learning summary"` + Details string `json:"details,omitempty" jsonschema:"optional detailed learning body"` + Category string `json:"category,omitempty"` + Area string `json:"area,omitempty"` + Status string `json:"status,omitempty"` + Priority string `json:"priority,omitempty"` + Confidence string `json:"confidence,omitempty"` + ActionRequired *bool `json:"action_required,omitempty"` + SourceType string `json:"source_type,omitempty"` + SourceRef string `json:"source_ref,omitempty"` + Project string `json:"project,omitempty" jsonschema:"project name or id; falls back to active session project"` + RelatedThoughtID *uuid.UUID `json:"related_thought_id,omitempty"` + RelatedSkillID *uuid.UUID `json:"related_skill_id,omitempty"` + ReviewedBy *string `json:"reviewed_by,omitempty"` + DuplicateOfLearningID *uuid.UUID `json:"duplicate_of_learning_id,omitempty"` + SupersedesLearningID *uuid.UUID `json:"supersedes_learning_id,omitempty"` + Tags []string `json:"tags,omitempty"` +} + +type AddLearningOutput struct { + Learning thoughttypes.Learning `json:"learning"` +} + +type GetLearningInput struct { + ID uuid.UUID `json:"id" jsonschema:"learning id"` +} + +type GetLearningOutput struct { + Learning thoughttypes.Learning `json:"learning"` +} + +type ListLearningsInput struct { + Limit int `json:"limit,omitempty"` + Project string `json:"project,omitempty" jsonschema:"project name or id; falls back to active 
session project"` + Category string `json:"category,omitempty"` + Area string `json:"area,omitempty"` + Status string `json:"status,omitempty"` + Priority string `json:"priority,omitempty"` + Tag string `json:"tag,omitempty"` + Query string `json:"query,omitempty"` +} + +type ListLearningsOutput struct { + Learnings []thoughttypes.Learning `json:"learnings"` +} + +func NewLearningsTool(db *store.DB, sessions *session.ActiveProjects, cfg config.SearchConfig) *LearningsTool { + return &LearningsTool{store: db, sessions: sessions, cfg: cfg} +} + +func (t *LearningsTool) Add(ctx context.Context, req *mcp.CallToolRequest, in AddLearningInput) (*mcp.CallToolResult, AddLearningOutput, error) { + summary := strings.TrimSpace(in.Summary) + if summary == "" { + return nil, AddLearningOutput{}, errRequiredField("summary") + } + + project, err := resolveProject(ctx, t.store, t.sessions, req, in.Project, false) + if err != nil { + return nil, AddLearningOutput{}, err + } + + learning := thoughttypes.Learning{ + Summary: summary, + Details: strings.TrimSpace(in.Details), + Category: defaultString(strings.TrimSpace(in.Category), "insight"), + Area: defaultString(strings.TrimSpace(in.Area), "other"), + Status: thoughttypes.LearningStatus(defaultString(strings.TrimSpace(in.Status), string(thoughttypes.LearningStatusPending))), + Priority: thoughttypes.LearningPriority(defaultString(strings.TrimSpace(in.Priority), string(thoughttypes.LearningPriorityMedium))), + Confidence: thoughttypes.LearningEvidenceLevel(defaultString(strings.TrimSpace(in.Confidence), string(thoughttypes.LearningEvidenceHypothesis))), + SourceType: strings.TrimSpace(in.SourceType), + SourceRef: strings.TrimSpace(in.SourceRef), + RelatedThoughtID: in.RelatedThoughtID, + RelatedSkillID: in.RelatedSkillID, + ReviewedBy: in.ReviewedBy, + DuplicateOfLearningID: in.DuplicateOfLearningID, + SupersedesLearningID: in.SupersedesLearningID, + Tags: normalizeStringSlice(in.Tags), + } + if in.ActionRequired != nil { + 
learning.ActionRequired = *in.ActionRequired + } + if project != nil { + learning.ProjectID = &project.ID + } + + created, err := t.store.CreateLearning(ctx, learning) + if err != nil { + return nil, AddLearningOutput{}, err + } + return nil, AddLearningOutput{Learning: created}, nil +} + +func (t *LearningsTool) Get(ctx context.Context, _ *mcp.CallToolRequest, in GetLearningInput) (*mcp.CallToolResult, GetLearningOutput, error) { + learning, err := t.store.GetLearning(ctx, in.ID) + if err != nil { + return nil, GetLearningOutput{}, err + } + return nil, GetLearningOutput{Learning: learning}, nil +} + +func (t *LearningsTool) List(ctx context.Context, req *mcp.CallToolRequest, in ListLearningsInput) (*mcp.CallToolResult, ListLearningsOutput, error) { + project, err := resolveProject(ctx, t.store, t.sessions, req, in.Project, false) + if err != nil { + return nil, ListLearningsOutput{}, err + } + + filter := thoughttypes.LearningFilter{ + Limit: normalizeLimit(in.Limit, t.cfg), + Category: strings.TrimSpace(in.Category), + Area: strings.TrimSpace(in.Area), + Status: strings.TrimSpace(in.Status), + Priority: strings.TrimSpace(in.Priority), + Tag: strings.TrimSpace(in.Tag), + Query: strings.TrimSpace(in.Query), + } + if project != nil { + filter.ProjectID = &project.ID + } + + items, err := t.store.ListLearnings(ctx, filter) + if err != nil { + return nil, ListLearningsOutput{}, err + } + return nil, ListLearningsOutput{Learnings: items}, nil +} + +func defaultString(value string, fallback string) string { + if value == "" { + return fallback + } + return value +} + +func normalizeStringSlice(values []string) []string { + if len(values) == 0 { + return []string{} + } + out := make([]string, 0, len(values)) + seen := map[string]struct{}{} + for _, value := range values { + trimmed := strings.TrimSpace(value) + if trimmed == "" { + continue + } + if _, ok := seen[trimmed]; ok { + continue + } + seen[trimmed] = struct{}{} + out = append(out, trimmed) + } + return out +} 
diff --git a/internal/types/learning.go b/internal/types/learning.go new file mode 100644 index 0000000..f0b986a --- /dev/null +++ b/internal/types/learning.go @@ -0,0 +1,68 @@ +package types + +import ( + "time" + + "github.com/google/uuid" +) + +type LearningEvidenceLevel string + +const ( + LearningEvidenceHypothesis LearningEvidenceLevel = "hypothesis" + LearningEvidenceObserved LearningEvidenceLevel = "observed" + LearningEvidenceVerified LearningEvidenceLevel = "verified" +) + +type LearningStatus string + +const ( + LearningStatusPending LearningStatus = "pending" + LearningStatusInProgress LearningStatus = "in_progress" + LearningStatusResolved LearningStatus = "resolved" + LearningStatusWontFix LearningStatus = "wont_fix" + LearningStatusPromoted LearningStatus = "promoted" +) + +type LearningPriority string + +const ( + LearningPriorityLow LearningPriority = "low" + LearningPriorityMedium LearningPriority = "medium" + LearningPriorityHigh LearningPriority = "high" +) + +type Learning struct { + ID uuid.UUID `json:"id"` + Summary string `json:"summary"` + Details string `json:"details"` + Category string `json:"category"` + Area string `json:"area"` + Status LearningStatus `json:"status"` + Priority LearningPriority `json:"priority"` + Confidence LearningEvidenceLevel `json:"confidence"` + ActionRequired bool `json:"action_required"` + SourceType string `json:"source_type,omitempty"` + SourceRef string `json:"source_ref,omitempty"` + ProjectID *uuid.UUID `json:"project_id,omitempty"` + RelatedThoughtID *uuid.UUID `json:"related_thought_id,omitempty"` + RelatedSkillID *uuid.UUID `json:"related_skill_id,omitempty"` + ReviewedBy *string `json:"reviewed_by,omitempty"` + ReviewedAt *time.Time `json:"reviewed_at,omitempty"` + DuplicateOfLearningID *uuid.UUID `json:"duplicate_of_learning_id,omitempty"` + SupersedesLearningID *uuid.UUID `json:"supersedes_learning_id,omitempty"` + Tags []string `json:"tags"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt 
time.Time `json:"updated_at"` +} + +type LearningFilter struct { + Limit int + ProjectID *uuid.UUID + Category string + Area string + Status string + Priority string + Tag string + Query string +} From 8e74dc92848d8a468115aae516e4e813b43f2ee4 Mon Sep 17 00:00:00 2001 From: Hein Date: Wed, 22 Apr 2026 15:14:36 +0200 Subject: [PATCH 13/15] ci: add module tidy step to CI workflow --- .gitea/workflows/ci.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitea/workflows/ci.yml b/.gitea/workflows/ci.yml index 09d1db7..0ce86d1 100644 --- a/.gitea/workflows/ci.yml +++ b/.gitea/workflows/ci.yml @@ -31,6 +31,9 @@ jobs: - name: Download dependencies run: go mod download + - name: Tidy modules + run: go mod tidy + - name: Run tests run: go test ./... From 20122a5f536d2b2b296ee16546b40ffe79084c12 Mon Sep 17 00:00:00 2001 From: sgcommand Date: Wed, 22 Apr 2026 23:03:58 +0200 Subject: [PATCH 14/15] feat(ui): add origin-style admin shell scaffold --- ui/package.json | 11 +- ui/pnpm-lock.yaml | 3150 ++++++++++++++++++++++++++++++++++++++++++ ui/src/App.svelte | 543 +++++--- ui/src/shellState.ts | 46 + 4 files changed, 3541 insertions(+), 209 deletions(-) create mode 100644 ui/src/shellState.ts diff --git a/ui/package.json b/ui/package.json index 1c4692f..533c3dd 100644 --- a/ui/package.json +++ b/ui/package.json @@ -19,5 +19,14 @@ "tailwindcss": "^4.1.4", "typescript": "^5.8.3", "vite": "^6.3.2" + }, + "dependencies": { + "@sentry/svelte": "^10.49.0", + "@skeletonlabs/skeleton": "^4.15.2", + "@skeletonlabs/skeleton-svelte": "^4.15.2", + "@tanstack/svelte-virtual": "^3.13.24", + "@warkypublic/artemis-kit": "file:../../artemis-kit", + "@warkypublic/resolvespec-js": "^1.0.1", + "@warkypublic/svelix": "^0.1.31" } -} \ No newline at end of file +} diff --git a/ui/pnpm-lock.yaml b/ui/pnpm-lock.yaml index 9e0c99a..b89adb6 100644 --- a/ui/pnpm-lock.yaml +++ b/ui/pnpm-lock.yaml @@ -7,6 +7,28 @@ settings: importers: .: + dependencies: + '@sentry/svelte': + specifier: ^10.49.0 + 
version: 10.49.0(svelte@5.55.1) + '@skeletonlabs/skeleton': + specifier: ^4.15.2 + version: 4.15.2(tailwindcss@4.2.2) + '@skeletonlabs/skeleton-svelte': + specifier: ^4.15.2 + version: 4.15.2(svelte@5.55.1) + '@tanstack/svelte-virtual': + specifier: ^3.13.24 + version: 3.13.24(svelte@5.55.1) + '@warkypublic/artemis-kit': + specifier: file:../../artemis-kit + version: file:../../artemis-kit + '@warkypublic/resolvespec-js': + specifier: ^1.0.1 + version: 1.0.1 + '@warkypublic/svelix': + specifier: ^0.1.31 + version: 0.1.37(highlight.js@11.8.0)(svelte@5.55.1)(unified@11.0.5) devDependencies: '@sveltejs/vite-plugin-svelte': specifier: ^5.0.3 @@ -35,6 +57,93 @@ importers: packages: + '@asamuzakjp/css-color@5.1.11': + resolution: {integrity: sha512-KVw6qIiCTUQhByfTd78h2yD1/00waTmm9uy/R7Ck/ctUyAPj+AEDLkQIdJW0T8+qGgj3j5bpNKK7Q3G+LedJWg==} + engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} + + '@asamuzakjp/dom-selector@7.1.1': + resolution: {integrity: sha512-67RZDnYRc8H/8MLDgQCDE//zoqVFwajkepHZgmXrbwybzXOEwOWGPYGmALYl9J2DOLfFPPs6kKCqmbzV895hTQ==} + engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} + + '@asamuzakjp/generational-cache@1.0.1': + resolution: {integrity: sha512-wajfB8KqzMCN2KGNFdLkReeHncd0AslUSrvHVvvYWuU8ghncRJoA50kT3zP9MVL0+9g4/67H+cdvBskj9THPzg==} + engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} + + '@asamuzakjp/nwsapi@2.3.9': + resolution: {integrity: sha512-n8GuYSrI9bF7FFZ/SjhwevlHc8xaVlb/7HmHelnc/PZXBD2ZR49NnN9sMMuDdEGPeeRQ5d0hqlSlEpgCX3Wl0Q==} + + '@bramus/specificity@2.4.2': + resolution: {integrity: sha512-ctxtJ/eA+t+6q2++vj5j7FYX3nRu311q1wfYH3xjlLOsczhlhxAg2FWNUXhpGvAw3BWo1xBcvOV6/YLc2r5FJw==} + hasBin: true + + '@cartamd/plugin-anchor@2.2.0': + resolution: {integrity: sha512-kbo0/UyIwgRaUEUqH1tKtyQg6POisjefoohWvIRPg5YAHtSuQ0dS4YhnAAeLTiKia8wM3oVUYON7fm45J1p5yQ==} + peerDependencies: + carta-md: ^4.0.0 + + '@cartamd/plugin-attachment@4.2.0': + resolution: {integrity: 
sha512-KdQBfcFdBlr7WVrZoCkf6KinIpb2ZsFP5TqZa+K4fNPEVMgLngbODs9l83zmgqEdI8YdDcsTEONMaHUa/qH3Og==} + peerDependencies: + carta-md: ^4.0.0 + + '@cartamd/plugin-code@4.2.0': + resolution: {integrity: sha512-rqv/Ol0kZQ4Z3+RmvT9l+CqkdYCDaDBO4ExpYu8+KmP3pgKWSFaJ2Cv1maZFZLGqUF6rWFPd1AcjeCxwY5nCWQ==} + peerDependencies: + carta-md: ^4.9.0 + + '@cartamd/plugin-component@1.1.1': + resolution: {integrity: sha512-eQaBCdHUppCMt42QSJTR2Jo7wS/ywENLJ5op6Rt5lHwxNYc7+BDXKSSE2RyXc/PWuvgWzpVR7db1Eh19lRzxBg==} + peerDependencies: + carta-md: ^4.0.0 + unified: ^11.0.0 + + '@cartamd/plugin-emoji@4.3.0': + resolution: {integrity: sha512-D73qZP/55er1b08CVmBA385omOY9/88zwB1JKBdhEFmOZzYfwr42GiyGHcHpcRinKe/MiaWV4k8vEYj461/wOQ==} + peerDependencies: + carta-md: ^4.0.0 + + '@cartamd/plugin-math@4.3.1': + resolution: {integrity: sha512-C1/LC3KNu9r46RJSUdOBiYInmvNOeUYiCY08wR3ygcXxnPhSGlaOHAj5/zRuEsr7O/Uml3650bAsDPnS3+CdsA==} + peerDependencies: + carta-md: ^4.9.0 + svelte: ^5.0.0 + + '@csstools/color-helpers@6.0.2': + resolution: {integrity: sha512-LMGQLS9EuADloEFkcTBR3BwV/CGHV7zyDxVRtVDTwdI2Ca4it0CCVTT9wCkxSgokjE5Ho41hEPgb8OEUwoXr6Q==} + engines: {node: '>=20.19.0'} + + '@csstools/css-calc@3.2.0': + resolution: {integrity: sha512-bR9e6o2BDB12jzN/gIbjHa5wLJ4UjD1CB9pM7ehlc0ddk6EBz+yYS1EV2MF55/HUxrHcB/hehAyt5vhsA3hx7w==} + engines: {node: '>=20.19.0'} + peerDependencies: + '@csstools/css-parser-algorithms': ^4.0.0 + '@csstools/css-tokenizer': ^4.0.0 + + '@csstools/css-color-parser@4.1.0': + resolution: {integrity: sha512-U0KhLYmy2GVj6q4T3WaAe6NPuFYCPQoE3b0dRGxejWDgcPp8TP7S5rVdM5ZrFaqu4N67X8YaPBw14dQSYx3IyQ==} + engines: {node: '>=20.19.0'} + peerDependencies: + '@csstools/css-parser-algorithms': ^4.0.0 + '@csstools/css-tokenizer': ^4.0.0 + + '@csstools/css-parser-algorithms@4.0.0': + resolution: {integrity: sha512-+B87qS7fIG3L5h3qwJ/IFbjoVoOe/bpOdh9hAjXbvx0o8ImEmUsGXN0inFOnk2ChCFgqkkGFQ+TpM5rbhkKe4w==} + engines: {node: '>=20.19.0'} + peerDependencies: + '@csstools/css-tokenizer': ^4.0.0 + + 
'@csstools/css-syntax-patches-for-csstree@1.1.3': + resolution: {integrity: sha512-SH60bMfrRCJF3morcdk57WklujF4Jr/EsQUzqkarfHXEFcAR1gg7fS/chAE922Sehgzc1/+Tz5H3Ypa1HiEKrg==} + peerDependencies: + css-tree: ^3.2.1 + peerDependenciesMeta: + css-tree: + optional: true + + '@csstools/css-tokenizer@4.0.0': + resolution: {integrity: sha512-QxULHAm7cNu72w97JUNCBFODFaXpbDg+dP8b/oWFAZ2MTRppA3U00Y2L1HqaS4J6yBqxwa/Y3nMBaxVKbB/NsA==} + engines: {node: '>=20.19.0'} + '@esbuild/aix-ppc64@0.25.12': resolution: {integrity: sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==} engines: {node: '>=18'} @@ -191,6 +300,35 @@ packages: cpu: [x64] os: [win32] + '@exodus/bytes@1.15.0': + resolution: {integrity: sha512-UY0nlA+feH81UGSHv92sLEPLCeZFjXOuHhrIo0HQydScuQc8s0A7kL/UdgwgDq8g8ilksmuoF35YVTNphV2aBQ==} + engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} + peerDependencies: + '@noble/hashes': ^1.8.0 || ^2.0.0 + peerDependenciesMeta: + '@noble/hashes': + optional: true + + '@floating-ui/core@1.7.5': + resolution: {integrity: sha512-1Ih4WTWyw0+lKyFMcBHGbb5U5FtuHJuujoyyr5zTaWS5EYMeT6Jb2AuDeftsCsEuchO+mM2ij5+q9crhydzLhQ==} + + '@floating-ui/dom@1.7.6': + resolution: {integrity: sha512-9gZSAI5XM36880PPMm//9dfiEngYoC6Am2izES1FF406YFsjvyBMmeJ2g4SAju3xWwtuynNRFL2s9hgxpLI5SQ==} + + '@floating-ui/utils@0.2.11': + resolution: {integrity: sha512-RiB/yIh78pcIxl6lLMG0CgBXAZ2Y0eVHqMPYugu+9U0AeT6YBeiJpf7lbdJNIugFP5SIjwNRgo4DhR1Qxi26Gg==} + + '@friendofsvelte/tipex@0.1.1': + resolution: {integrity: sha512-p+EgF9wHa1q1bkLhoFPmJ6KTJIdR5aWapzC+wFeLH0h2xzv8uMxmYsvydxS011HKs9s6rqYKIDQi7Ckj4uOVhQ==} + peerDependencies: + svelte: ^5.0.0 + + '@iconify/types@2.0.0': + resolution: {integrity: sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==} + + '@internationalized/date@3.12.0': + resolution: {integrity: sha512-/PyIMzK29jtXaGU23qTvNZxvBXRtKbNnGDFD+PY6CZw/Y8Ex8pFUzkuCJCG9aOqmShjqhS9mPqP6Dk5onQY8rQ==} + 
'@jridgewell/gen-mapping@0.3.13': resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==} @@ -207,6 +345,16 @@ packages: '@jridgewell/trace-mapping@0.3.31': resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==} + '@js-temporal/polyfill@0.5.1': + resolution: {integrity: sha512-hloP58zRVCRSpgDxmqCWJNlizAlUgJFqG2ypq79DCvyv9tHjRYMDOcPFjzfl/A1/YxDvRCZz8wvZvmapQnKwFQ==} + engines: {node: '>=12'} + + '@popperjs/core@2.11.8': + resolution: {integrity: sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==} + + '@remirror/core-constants@3.0.0': + resolution: {integrity: sha512-42aWfPrimMfDKDi4YegyS7x+/0tlzaqwPQCULLanv3DMIlu96KTJR0fM5isWX2UViOqlGnX6YFgqWepcX+XMNg==} + '@rollup/rollup-android-arm-eabi@4.60.1': resolution: {integrity: sha512-d6FinEBLdIiK+1uACUttJKfgZREXrF0Qc2SmLII7W2AD8FfiZ9Wjd+rD/iRuf5s5dWrr1GgwXCvPqOuDquOowA==} cpu: [arm] @@ -345,6 +493,113 @@ packages: cpu: [x64] os: [win32] + '@sentry-internal/browser-utils@10.49.0': + resolution: {integrity: sha512-n0QRx0Ysx6mPfIydTkz7VP0FmwM+/EqMZiRqdsU3aTYsngE9GmEDV0OL1bAy6a8N/C1xf9vntkuAtj6N/8Z51w==} + engines: {node: '>=18'} + + '@sentry-internal/feedback@10.49.0': + resolution: {integrity: sha512-JNsUBGv0faCFE7MeZUH99Y9lU9qq3LBALbLxpE1x7ngNrQnVYRlcFgdqaD/btNBKr8awjYL8gmcSkHBWskGqLQ==} + engines: {node: '>=18'} + + '@sentry-internal/replay-canvas@10.49.0': + resolution: {integrity: sha512-7D/NrgH1Qwx5trDYaaTSSJmCb1yVQQLqFG4G/S9x2ltzl9876lSGJL8UeW8ReNQgF3CDAcwbmm/9aXaVSBUNZA==} + engines: {node: '>=18'} + + '@sentry-internal/replay@10.49.0': + resolution: {integrity: sha512-IEy4lwHVMiRE3JAcn+kFKjsTgalDOCSTf20SoFd+nkt6rN/k1RDyr4xpdfF//Kj3UdeTmbuibYjK5H/FLhhnGg==} + engines: {node: '>=18'} + + '@sentry/browser@10.49.0': + resolution: {integrity: sha512-bGCHc+wK2Dx67YoSbmtlt04alqWfQ+dasD/GVipVOq50gvw/BBIDHTEWRJEjACl+LrvszeY54V+24p8z4IgysA==} + engines: 
{node: '>=18'} + + '@sentry/core@10.49.0': + resolution: {integrity: sha512-UaFeum3LUM1mB0d67jvKnqId1yWQjyqmaDV6kWngG03x+jqXb08tJdGpSoxjXZe13jFBbiBL/wKDDYIK7rCK4g==} + engines: {node: '>=18'} + + '@sentry/svelte@10.49.0': + resolution: {integrity: sha512-onQ+dpvjn1impT72Lsp0I0i2C5796pxOY+MyH3BYd139os+8uskatzYZddBTe+r36t8+M0gWk5PQqftcvAaFwQ==} + engines: {node: '>=18'} + peerDependencies: + svelte: 3.x || 4.x || 5.x + + '@shikijs/core@3.23.0': + resolution: {integrity: sha512-NSWQz0riNb67xthdm5br6lAkvpDJRTgB36fxlo37ZzM2yq0PQFFzbd8psqC2XMPgCzo1fW6cVi18+ArJ44wqgA==} + + '@shikijs/engine-javascript@3.23.0': + resolution: {integrity: sha512-aHt9eiGFobmWR5uqJUViySI1bHMqrAgamWE1TYSUoftkAeCCAiGawPMwM+VCadylQtF4V3VNOZ5LmfItH5f3yA==} + + '@shikijs/engine-oniguruma@3.23.0': + resolution: {integrity: sha512-1nWINwKXxKKLqPibT5f4pAFLej9oZzQTsby8942OTlsJzOBZ0MWKiwzMsd+jhzu8YPCHAswGnnN1YtQfirL35g==} + + '@shikijs/langs@3.23.0': + resolution: {integrity: sha512-2Ep4W3Re5aB1/62RSYQInK9mM3HsLeB91cHqznAJMuylqjzNVAVCMnNWRHFtcNHXsoNRayP9z1qj4Sq3nMqYXg==} + + '@shikijs/rehype@3.23.0': + resolution: {integrity: sha512-GepKJxXHbXFfAkiZZZ+4V7x71Lw3s0ALYmydUxJRdvpKjSx9FOMSaunv6WRLFBXR6qjYerUq1YZQno+2gLEPwA==} + + '@shikijs/themes@3.23.0': + resolution: {integrity: sha512-5qySYa1ZgAT18HR/ypENL9cUSGOeI2x+4IvYJu4JgVJdizn6kG4ia5Q1jDEOi7gTbN4RbuYtmHh0W3eccOrjMA==} + + '@shikijs/types@3.23.0': + resolution: {integrity: sha512-3JZ5HXOZfYjsYSk0yPwBrkupyYSLpAE26Qc0HLghhZNGTZg/SKxXIIgoxOpmmeQP0RRSDJTk1/vPfw9tbw+jSQ==} + + '@shikijs/vscode-textmate@10.0.2': + resolution: {integrity: sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg==} + + '@sindresorhus/is@4.6.0': + resolution: {integrity: sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==} + engines: {node: '>=10'} + + '@skeletonlabs/skeleton-common@4.15.2': + resolution: {integrity: 
sha512-y7KZn++Av8UHdoeaaguQ7zIS1HlK7Y5jyPDnHy65wJ60iS97fMtQV0kGhqVoYCWhor+xrjbAFjWtaPOPPDEMYA==} + + '@skeletonlabs/skeleton-svelte@4.15.2': + resolution: {integrity: sha512-vZkRhR701EOHVXVKh/NFRSY8D9LPBvmdNFdtfgqO7TEg77sypJUvERl2dxErTLY1QjVklVpjsHRCTaSLn+1Ntg==} + peerDependencies: + svelte: ^5.29.0 + + '@skeletonlabs/skeleton@4.15.2': + resolution: {integrity: sha512-5O23Py76nw56aoieV2b2T7MJ6xS0DwDjUULpwLnCXxXOnmzADEoWoQxb/ABbqg04IMOygtRzK3HO22I1+kFsog==} + peerDependencies: + tailwindcss: ^4.0.0 + + '@svar-ui/core-locales@2.5.1': + resolution: {integrity: sha512-3VYDf/zdFVPKb8Zyf6u5IjM089WGOtNswPG8KT4KZNEoWiVIHblCJp1uXfzHERTFikNSVjllaT44S7M9nOhRsw==} + + '@svar-ui/grid-data-provider@2.6.2': + resolution: {integrity: sha512-wFmxuaWsodJvgCMyRIBm29X4ETSLxxZ1sKqw16TRt+THlulO7TB9bwCBzUAS46jhgtMahXT1Kyi3Yc4CfzLnIQ==} + + '@svar-ui/grid-locales@2.6.2': + resolution: {integrity: sha512-K8HKXly8t0imBJC23e/ZRa+t3jLGf+S+QW4ryKoqVwhdqOi3ugndDPj5mYfNNpZEwox7aWhdLBiakP+QMs2ueA==} + + '@svar-ui/grid-store@2.6.2': + resolution: {integrity: sha512-M7VsTCAvpyzpE46lA0YN1ZQfp09o1O6TszKnVCuB7Fu87w4oB4NtkNKdoI8TXDEYY3z7MGv6eHQ3c/TXpsq1eQ==} + + '@svar-ui/lib-data-provider@1.7.2': + resolution: {integrity: sha512-Od93Iiy0vBwVzgJm25WdizH3d4Xlli/HAA2kqoUj1pzeZ2SWBeFzvE59iihQz8NuIlfAtQJ6MMe168458T+2lQ==} + + '@svar-ui/lib-dom@0.12.1': + resolution: {integrity: sha512-srvDleQqR6stBBDijCgzDHMFTTffI6sv3XCpNPKmurrnE83L1MDNqezqg6j3+0dUVPd4AAEGOez2DvK6byblTQ==} + + '@svar-ui/lib-state@1.9.6': + resolution: {integrity: sha512-twoHa8wZvzdxtES68s6Wnp4Xi1WivScNaFFK6sJ6WDgqVYOzJw0ZhVVrdwHQgieLITTKl/DYh/U3nuqtMetKYw==} + + '@svar-ui/lib-svelte@0.5.2': + resolution: {integrity: sha512-i+hHMT3zctaGbRpyMDREt9im7YfLpvGwFYvH1ntEp/kYAwUrR/sxMJlEEKIid+Yg2EeMUstGxmYQbkWoMpPG7A==} + + '@svar-ui/svelte-core@2.5.1': + resolution: {integrity: sha512-snXNEYhWkg+7JQmhsS3xnN6UozfTj8+cQadKl3dsUyh3328F6NZZpKbUkXNArK95+OcueJZUXyExkDWv1NqmDA==} + + '@svar-ui/svelte-grid@2.6.2': + resolution: {integrity: 
sha512-iFOoYBZQnXMQUx2Ko80sjcDGBWvmKQSHb/pYesdYEewgvF5w3ZeUc0qG3FX/7EWthKADoIACzdMLVHDRY9j1ow==} + + '@svar-ui/svelte-menu@2.5.1': + resolution: {integrity: sha512-2ysDlaWR9FrLPRjCd/KB9L9FQry6xR06krAGKPvfFeueXIo3ySuRks6lniNzbLMvKWHG6VOzrBlAAfn0FTeipw==} + + '@svar-ui/svelte-toolbar@2.5.1': + resolution: {integrity: sha512-QAomfCKtX4tqFLLid/H/U5STqVwrFi5FKPGssDHO55rYlyk+q/v6TfkO3qJYQlFEveui7IgaoWHY9lGxIn8tXg==} + '@sveltejs/acorn-typescript@1.0.9': resolution: {integrity: sha512-lVJX6qEgs/4DOcRTpo56tmKzVPtoWAaVbL4hfO7t7NVwl9AAXzQR6cihesW1BmNMPl+bK6dreu2sOKBP2Q9CIA==} peerDependencies: @@ -365,6 +620,9 @@ packages: svelte: ^5.0.0 vite: ^6.0.0 + '@swc/helpers@0.5.21': + resolution: {integrity: sha512-jI/VAmtdjB/RnI8GTnokyX7Ug8c+g+ffD6QRLa6XQewtnGyukKkKSk3wLTM3b5cjt1jNh9x0jfVlagdN2gDKQg==} + '@tailwindcss/node@4.2.2': resolution: {integrity: sha512-pXS+wJ2gZpVXqFaUEjojq7jzMpTGf8rU6ipJz5ovJV6PUGmlJ+jvIwGrzdHdQ80Sg+wmQxUFuoW1UAAwHNEdFA==} @@ -459,24 +717,394 @@ packages: peerDependencies: vite: ^5.2.0 || ^6 || ^7 || ^8 + '@tanstack/svelte-virtual@3.13.24': + resolution: {integrity: sha512-Up3LOD5Cj+oJ3GuKfM1Li06jzzZMIZnRPmu3aik9rJQgk7jq7LgPo4yumfUw4+I4edjYfyPKSZnXGwZ9Vjlebw==} + peerDependencies: + svelte: ^3.48.0 || ^4.0.0 || ^5.0.0 + + '@tanstack/virtual-core@3.14.0': + resolution: {integrity: sha512-JLANqGy/D6k4Ujmh8Tr25lGimuOXNiaVyXaCAZS0W+1390sADdGnyUdSWNIfd49gebtIxGMij4IktRVzrdr12Q==} + + '@tiptap/core@2.27.2': + resolution: {integrity: sha512-ABL1N6eoxzDzC1bYvkMbvyexHacszsKdVPYqhl5GwHLOvpZcv9VE9QaKwDILTyz5voCA0lGcAAXZp+qnXOk5lQ==} + peerDependencies: + '@tiptap/pm': ^2.7.0 + + '@tiptap/extension-blockquote@2.27.2': + resolution: {integrity: sha512-oIGZgiAeA4tG3YxbTDfrmENL4/CIwGuP3THtHsNhwRqwsl9SfMk58Ucopi2GXTQSdYXpRJ0ahE6nPqB5D6j/Zw==} + peerDependencies: + '@tiptap/core': ^2.7.0 + + '@tiptap/extension-bold@2.27.2': + resolution: {integrity: sha512-bR7J5IwjCGQ0s3CIxyMvOCnMFMzIvsc5OVZKscTN5UkXzFsaY6muUAIqtKxayBUucjtUskm5qZowJITCeCb1/A==} + peerDependencies: + 
'@tiptap/core': ^2.7.0 + + '@tiptap/extension-bullet-list@2.27.2': + resolution: {integrity: sha512-gmFuKi97u5f8uFc/GQs+zmezjiulZmFiDYTh3trVoLRoc2SAHOjGEB7qxdx7dsqmMN7gwiAWAEVurLKIi1lnnw==} + peerDependencies: + '@tiptap/core': ^2.7.0 + + '@tiptap/extension-code-block-lowlight@2.27.2': + resolution: {integrity: sha512-v6NKStBbQ/XCc1NnCi3ObsL1DsxadSIBtUQNA/B+urkPgn5LEy72HAGlf0xwjRaNkAGSaTASLKmc84L5q5zlGQ==} + peerDependencies: + '@tiptap/core': ^2.7.0 + '@tiptap/extension-code-block': ^2.7.0 + '@tiptap/pm': ^2.7.0 + highlight.js: ^11 + lowlight: ^2 || ^3 + + '@tiptap/extension-code-block@2.27.2': + resolution: {integrity: sha512-KgvdQHS4jXr79aU3wZOGBIZYYl9vCB7uDEuRFV4so2rYrfmiYMw3T8bTnlNEEGe4RUeAms1i4fdwwvQp9nR1Dw==} + peerDependencies: + '@tiptap/core': ^2.7.0 + '@tiptap/pm': ^2.7.0 + + '@tiptap/extension-code@2.27.2': + resolution: {integrity: sha512-7X9AgwqiIGXoZX7uvdHQsGsjILnN/JaEVtqfXZnPECzKGaWHeK/Ao4sYvIIIffsyZJA8k5DC7ny2/0sAgr2TuA==} + peerDependencies: + '@tiptap/core': ^2.7.0 + + '@tiptap/extension-document@2.27.2': + resolution: {integrity: sha512-CFhAYsPnyYnosDC4639sCJnBUnYH4Cat9qH5NZWHVvdgtDwu8GZgZn2eSzaKSYXWH1vJ9DSlCK+7UyC3SNXIBA==} + peerDependencies: + '@tiptap/core': ^2.7.0 + + '@tiptap/extension-dropcursor@2.27.2': + resolution: {integrity: sha512-oEu/OrktNoQXq1x29NnH/GOIzQZm8ieTQl3FK27nxfBPA89cNoH4mFEUmBL5/OFIENIjiYG3qWpg6voIqzswNw==} + peerDependencies: + '@tiptap/core': ^2.7.0 + '@tiptap/pm': ^2.7.0 + + '@tiptap/extension-floating-menu@2.27.2': + resolution: {integrity: sha512-GUN6gPIGXS7ngRJOwdSmtBRBDt9Kt9CM/9pSwKebhLJ+honFoNA+Y6IpVyDvvDMdVNgBchiJLs6qA5H97gAePQ==} + peerDependencies: + '@tiptap/core': ^2.7.0 + '@tiptap/pm': ^2.7.0 + + '@tiptap/extension-gapcursor@2.27.2': + resolution: {integrity: sha512-/c9VF1HBxj+AP54XGVgCmD9bEGYc5w5OofYCFQgM7l7PB1J00A4vOke0oPkHJnqnOOyPlFaxO/7N6l3XwFcnKA==} + peerDependencies: + '@tiptap/core': ^2.7.0 + '@tiptap/pm': ^2.7.0 + + '@tiptap/extension-hard-break@2.27.2': + resolution: {integrity: 
sha512-kSRVGKlCYK6AGR0h8xRkk0WOFGXHIIndod3GKgWU49APuIGDiXd8sziXsSlniUsWmqgDmDXcNnSzPcV7AQ8YNg==} + peerDependencies: + '@tiptap/core': ^2.7.0 + + '@tiptap/extension-heading@2.27.2': + resolution: {integrity: sha512-iM3yeRWuuQR/IRQ1djwNooJGfn9Jts9zF43qZIUf+U2NY8IlvdNsk2wTOdBgh6E0CamrStPxYGuln3ZS4fuglw==} + peerDependencies: + '@tiptap/core': ^2.7.0 + + '@tiptap/extension-history@2.27.2': + resolution: {integrity: sha512-+hSyqERoFNTWPiZx4/FCyZ/0eFqB9fuMdTB4AC/q9iwu3RNWAQtlsJg5230bf/qmyO6bZxRUc0k8p4hrV6ybAw==} + peerDependencies: + '@tiptap/core': ^2.7.0 + '@tiptap/pm': ^2.7.0 + + '@tiptap/extension-horizontal-rule@2.27.2': + resolution: {integrity: sha512-WGWUSgX+jCsbtf9Y9OCUUgRZYuwjVoieW5n6mAUohJ9/6gc6sGIOrUpBShf+HHo6WD+gtQjRd+PssmX3NPWMpg==} + peerDependencies: + '@tiptap/core': ^2.7.0 + '@tiptap/pm': ^2.7.0 + + '@tiptap/extension-image@2.27.2': + resolution: {integrity: sha512-5zL/BY41FIt72azVrCrv3n+2YJ/JyO8wxCcA4Dk1eXIobcgVyIdo4rG39gCqIOiqziAsqnqoj12QHTBtHsJ6mQ==} + peerDependencies: + '@tiptap/core': ^2.7.0 + + '@tiptap/extension-italic@2.27.2': + resolution: {integrity: sha512-1OFsw2SZqfaqx5Fa5v90iNlPRcqyt+lVSjBwTDzuPxTPFY4Q0mL89mKgkq2gVHYNCiaRkXvFLDxaSvBWbmthgg==} + peerDependencies: + '@tiptap/core': ^2.7.0 + + '@tiptap/extension-link@2.27.2': + resolution: {integrity: sha512-bnP61qkr0Kj9Cgnop1hxn2zbOCBzNtmawxr92bVTOE31fJv6FhtCnQiD6tuPQVGMYhcmAj7eihtvuEMFfqEPcQ==} + peerDependencies: + '@tiptap/core': ^2.7.0 + '@tiptap/pm': ^2.7.0 + + '@tiptap/extension-list-item@2.27.2': + resolution: {integrity: sha512-eJNee7IEGXMnmygM5SdMGDC8m/lMWmwNGf9fPCK6xk0NxuQRgmZHL6uApKcdH6gyNcRPHCqvTTkhEP7pbny/fg==} + peerDependencies: + '@tiptap/core': ^2.7.0 + + '@tiptap/extension-ordered-list@2.27.2': + resolution: {integrity: sha512-M7A4tLGJcLPYdLC4CI2Gwl8LOrENQW59u3cMVa+KkwG1hzSJyPsbDpa1DI6oXPC2WtYiTf22zrbq3gVvH+KA2w==} + peerDependencies: + '@tiptap/core': ^2.7.0 + + '@tiptap/extension-paragraph@2.27.2': + resolution: {integrity: 
sha512-elYVn2wHJJ+zB9LESENWOAfI4TNT0jqEN34sMA/hCtA4im1ZG2DdLHwkHIshj/c4H0dzQhmsS/YmNC5Vbqab/A==} + peerDependencies: + '@tiptap/core': ^2.7.0 + + '@tiptap/extension-placeholder@2.27.2': + resolution: {integrity: sha512-IjsgSVYJRjpAKmIoapU0E2R4E2FPY3kpvU7/1i7PUYisylqejSJxmtJPGYw0FOMQY9oxnEEvfZHMBA610tqKpg==} + peerDependencies: + '@tiptap/core': ^2.7.0 + '@tiptap/pm': ^2.7.0 + + '@tiptap/extension-strike@2.27.2': + resolution: {integrity: sha512-HHIjhafLhS2lHgfAsCwC1okqMsQzR4/mkGDm4M583Yftyjri1TNA7lzhzXWRFWiiMfJxKtdjHjUAQaHuteRTZw==} + peerDependencies: + '@tiptap/core': ^2.7.0 + + '@tiptap/extension-task-item@2.27.2': + resolution: {integrity: sha512-ZBSqj/dygB/Rp5K9qOxRVwASTZCmKVoTq8C59KvMgD/aFjJxhq/w2dZaWkCUEXEep+NmvJqo0kfeAEMY5UDnGg==} + peerDependencies: + '@tiptap/core': ^2.7.0 + '@tiptap/pm': ^2.7.0 + + '@tiptap/extension-task-list@2.27.2': + resolution: {integrity: sha512-5nupAewdzZ9F3599oAcaK0WkDH04wdACAVBPM4zG7InlIpkbho3txB7zWmm64OxfhCMIMGKiXY1q0bw9i0QBGQ==} + peerDependencies: + '@tiptap/core': ^2.7.0 + + '@tiptap/extension-text-style@2.27.2': + resolution: {integrity: sha512-Omk+uxjJLyEY69KStpCw5fA9asvV+MGcAX2HOxyISDFoLaL49TMrNjhGAuz09P1L1b0KGXo4ml7Q3v/Lfy4WPA==} + peerDependencies: + '@tiptap/core': ^2.7.0 + + '@tiptap/extension-text@2.27.2': + resolution: {integrity: sha512-Xk7nYcigljAY0GO9hAQpZ65ZCxqOqaAlTPDFcKerXmlkQZP/8ndx95OgUb1Xf63kmPOh3xypurGS2is3v0MXSA==} + peerDependencies: + '@tiptap/core': ^2.7.0 + + '@tiptap/extension-underline@2.27.2': + resolution: {integrity: sha512-gPOsbAcw1S07ezpAISwoO8f0RxpjcSH7VsHEFDVuXm4ODE32nhvSinvHQjv2icRLOXev+bnA7oIBu7Oy859gWQ==} + peerDependencies: + '@tiptap/core': ^2.7.0 + + '@tiptap/pm@2.27.2': + resolution: {integrity: sha512-kaEg7BfiJPDQMKbjVIzEPO3wlcA+pZb2tlcK9gPrdDnEFaec2QTF1sXz2ak2IIb2curvnIrQ4yrfHgLlVA72wA==} + + '@tiptap/starter-kit@2.27.2': + resolution: {integrity: sha512-bb0gJvPoDuyRUQ/iuN52j1//EtWWttw+RXAv1uJxfR0uKf8X7uAqzaOOgwjknoCIDC97+1YHwpGdnRjpDkOBxw==} + + '@types/debug@4.1.13': + resolution: 
{integrity: sha512-KSVgmQmzMwPlmtljOomayoR89W4FynCAi3E8PPs7vmDVPe84hT+vGPKkJfThkmXs0x0jAaa9U8uW8bbfyS2fWw==} + '@types/estree@1.0.8': resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} + '@types/hast@2.3.10': + resolution: {integrity: sha512-McWspRw8xx8J9HurkVBfYj0xKoE25tOFlHGdx4MJ5xORQrMGZNqJhVQWaIbm6Oyla5kYOXtDiopzKRJzEOkwJw==} + + '@types/hast@3.0.4': + resolution: {integrity: sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==} + + '@types/katex@0.16.8': + resolution: {integrity: sha512-trgaNyfU+Xh2Tc+ABIb44a5AYUpicB3uwirOioeOkNPPbmgRNtcWyDeeFRzjPZENO9Vq8gvVqfhaaXWLlevVwg==} + + '@types/linkify-it@5.0.0': + resolution: {integrity: sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q==} + + '@types/markdown-it@14.1.2': + resolution: {integrity: sha512-promo4eFwuiW+TfGxhi+0x3czqTYJkG8qB17ZUJiVF10Xm7NLVRSLUsfRTU/6h1e24VvRnXCx+hG7li58lkzog==} + + '@types/mdast@4.0.4': + resolution: {integrity: sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==} + + '@types/mdurl@2.0.0': + resolution: {integrity: sha512-RGdgjQUZba5p6QEFAVx2OGb8rQDL/cPRG7GiedRzMcJ1tYnUANBncjbSB1NRGwbvjcPeikRABz2nshyPk1bhWg==} + + '@types/ms@2.1.0': + resolution: {integrity: sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==} + '@types/node@24.12.2': resolution: {integrity: sha512-A1sre26ke7HDIuY/M23nd9gfB+nrmhtYyMINbjI1zHJxYteKR6qSMX56FsmjMcDb3SMcjJg5BiRRgOCC/yBD0g==} '@types/trusted-types@2.0.7': resolution: {integrity: sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==} + '@types/unist@2.0.11': + resolution: {integrity: sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==} + + '@types/unist@3.0.3': + resolution: {integrity: 
sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==} + '@typescript-eslint/types@8.58.0': resolution: {integrity: sha512-O9CjxypDT89fbHxRfETNoAnHj/i6IpRK0CvbVN3qibxlLdo5p5hcLmUuCCrHMpxiWSwKyI8mCP7qRNYuOJ0Uww==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@ungap/structured-clone@1.3.0': + resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==} + + '@warkypublic/artemis-kit@1.0.10': + resolution: {integrity: sha512-qIgjcWqLyYfoKDUYt3Gm7PVe2S4AdjA46J1jPIff1p6wUP5WsHA8UfZq7pEdP6YNxqavv+h84oe1+HsJOoU6jQ==} + engines: {node: '>=14.16'} + + '@warkypublic/artemis-kit@file:../../artemis-kit': + resolution: {directory: ../../artemis-kit, type: directory} + engines: {node: '>=14.16'} + + '@warkypublic/resolvespec-js@1.0.1': + resolution: {integrity: sha512-uXP1HouxpOKXfwE6qpy0gCcrMPIgjDT53aVGkfork4QejRSunbKWSKKawW2nIm7RnyFhSjPILMXcnT5xUiXOew==} + engines: {node: '>=18'} + + '@warkypublic/svelix@0.1.37': + resolution: {integrity: sha512-OZ9of7ctyR1qEVbsfwS7NTAAJKbgWyJPKvYY5QAHdvTJjb6oT7gSRFsRPZp0ji2ht1bCRyYTuVA2EIXvyM/4tw==} + peerDependencies: + svelte: ^5.0.0 + + '@zag-js/accordion@1.39.1': + resolution: {integrity: sha512-GA3m7gRTm3weSe1eMlHIsTNztcjZ6joIaRgxxKil7q/UX0xIVVGDy0aCr6oo7FAuoMiOOBVurYXILpFZ30nOXA==} + + '@zag-js/anatomy@1.39.1': + resolution: {integrity: sha512-p2iFAs2pVQgv5iCDAftA7g9Z/fUYXW94dRIGk415TSbkp/YDENydm/JtRoNctp302UIx4Eeuc5QBR+7h5kuISA==} + + '@zag-js/aria-hidden@1.39.1': + resolution: {integrity: sha512-wiwcz3N086qBMEU3VKfHhcvGm6Jm1PIcDXys/jEqiKPtHoYZhDip0n0cPOoasss/A1oS39QFVdk3WpLXGu3Izw==} + + '@zag-js/auto-resize@1.39.1': + resolution: {integrity: sha512-ditIo9mW7fapq+4yx3/8hMpMZlWaoOy66EOzUz8dSVqnxnTWAjnTICu/9zFh8pkWerlzGTtDOJPP1oZ8S/rgVg==} + + '@zag-js/avatar@1.39.1': + resolution: {integrity: sha512-LWrgJ0bebnXPSL+uehA9z6BlCD/MZEOQBJqH/F2QQFSAAZXUUDKtzVDmc+UtwjDsHXqqTghi+v2atQJHNMcJ2g==} + + '@zag-js/carousel@1.39.1': + 
resolution: {integrity: sha512-5z5z3IldUgZ/R+KZLNQDoJFNTXzYd28YOmgfWH61Vvyv+RarX8kwZW8ajW/fNiqcWXyhW3/VMU0lArrfjbQVtQ==} + + '@zag-js/collapsible@1.39.1': + resolution: {integrity: sha512-Zgccg/t7M8i0JVwZPPgW7XB7kGhTO475hsmwkF/8CYLqBBckVDHUARp2we24hENCm/98eez6R0eDEmE+tldFWA==} + + '@zag-js/collection@1.39.1': + resolution: {integrity: sha512-fyOyKmP7MRo0/U8mBmB7KgHRXHhXP27LCcasy3x+qTAQtuEfYG1EPhKuj07oBWlX/2qfcKYn2R3YopHcqFcCiA==} + + '@zag-js/combobox@1.39.1': + resolution: {integrity: sha512-fmStpG+k4xrxCzqUX0ssnOMeoSietWm5ir3qmEZcagzNqNycAXMvOELAIeyXi87Kut6aDGhxLOV7o395HVXl/w==} + + '@zag-js/core@1.39.1': + resolution: {integrity: sha512-Yp0r49QLYXe2j7fgyAiilH4umXFydCnr5hcRDwJU+sxvUAlq00JQIJIEK2pT6k8cJiNNsFEV5WkOX7jsqpAX2A==} + + '@zag-js/date-picker@1.39.1': + resolution: {integrity: sha512-t9q1H0aZQJkbzKTR2Bn5vMwaoFoirxekiSxw8ju0F0vr4Kg4BJ9yueOQm5I2wALKnJbZu4Ua5MgzlrDF3CQt3A==} + peerDependencies: + '@internationalized/date': '>=3.0.0' + + '@zag-js/date-utils@1.39.1': + resolution: {integrity: sha512-i4SvBhru2Yz/zsHT0XvyFhf4a+pAKYkWXeVfU0RvF2S6mPTfgaMFF9ZNPq5Sy8K31EtAa6AVXcybYaYnibn1FA==} + peerDependencies: + '@internationalized/date': '>=3.0.0' + + '@zag-js/dialog@1.39.1': + resolution: {integrity: sha512-q+HTmfuRDRZthln9mb7i52wdltQOZlw3+nw3a2uygEe9xuEtHBwUz31XJzkn2UWQqhAt7cC39OwykhNLKrfkqA==} + + '@zag-js/dismissable@1.39.1': + resolution: {integrity: sha512-7/soy93Ersd5qedhSL/+CDcZ9gNTQV0ooDcqKtM8b4IxwD4rgWwGsewJY+tbKmOqaZobwa0YcWV2+YGgI23ESw==} + + '@zag-js/dom-query@1.39.1': + resolution: {integrity: sha512-k01aXeUWLyJfB61CODaXj4PLhYmVpnVMFrC+3nk/XCn1MW7my8L/8KVg0m4W8n+X9MhpaLWsZDmK/dwED/3qSw==} + + '@zag-js/file-upload@1.39.1': + resolution: {integrity: sha512-cErPOnPwPyneUXpelsfm75DKn0/4SI8aqQnlbrqo522PEqAQyDfDdBsqebGgKWG3F0A++kKFp9LO9A5zCrw5gA==} + + '@zag-js/file-utils@1.39.1': + resolution: {integrity: sha512-ll/W5o74SMmoAS+l7PkmmGjPj4PLCSG/cwQh1Y/+LpaSev0YiR3Nk2OzRIIPtm3NivYVxKGawaCOf1RvT/82LQ==} + + '@zag-js/floating-panel@1.39.1': + resolution: 
{integrity: sha512-IfPbf3pwJGqBWHec/rPzpdPjfMCLed59LlEophvRy49FEdksv8eN6nr9DXl2wWZEoQhH99scXfLMbtEZsPsFWg==} + + '@zag-js/focus-trap@1.39.1': + resolution: {integrity: sha512-2ZzVefHMotvtxUo/gP4R45Szw/EPaPkTKEHaug6/il62SPDbkFODF+5r1zXyLbLuwCHq0apvQasg/ONLihwlXw==} + + '@zag-js/focus-visible@1.39.1': + resolution: {integrity: sha512-iEuTOYHE8HRn/7ULC9c9BTTWo0C0MJRCbYVxbh/d7v8qAuq4CS76pdfceNo3KeWbb968T+yiG6q0AjiHsr8IOw==} + + '@zag-js/i18n-utils@1.39.1': + resolution: {integrity: sha512-TKRLQQlHgJ4cxsHo3tZPtbFjGu9m1UPtfezRGFKq7A8czhdqRhaCpaWF849cd6dI7x6rWvvTan858gOFpyANnQ==} + + '@zag-js/interact-outside@1.39.1': + resolution: {integrity: sha512-LnSbA+txMsFmzNPn84QKH01x2yJv4At/eKHn6rT2PyxXkJQIh8PvCTS3zVz4Syw11cmhcXt2eRwhzx8yImV92w==} + + '@zag-js/listbox@1.39.1': + resolution: {integrity: sha512-Mz0UpdXobdTQTyjM+Avgi7pDVB2dKyaUHqw3TloeleQL3VwTqClclkwHXtLYYE+oXa0zOet37wI9mzfaYx9iZQ==} + + '@zag-js/live-region@1.39.1': + resolution: {integrity: sha512-E7YNd0QGzJ2n1ZhnI2smv+klwifsNRf9QaDCx7quVJCVYywpupsBK4R25KN75S1z8XaK+jAy6HYKj8DIhYjYeg==} + + '@zag-js/menu@1.39.1': + resolution: {integrity: sha512-bRDGLGkiGhzNtORBXkbBQV/xp2zEkwpYIepfWCaUoFwKUmx7GGnShTBFxJyq0u2D4IkS9GOwcqm20EhMv6V+TA==} + + '@zag-js/pagination@1.39.1': + resolution: {integrity: sha512-3Q1B9/g3ajhvXjuGffJ7otyXcXK5+uhdbE5A9CZa4bsW3pf25L9Cp+ZAjdXQMDc8T4jhZJAKFmDJfQgtr1oEIw==} + + '@zag-js/popover@1.39.1': + resolution: {integrity: sha512-aO3ExO/O7Sa3ovdozFI6SujhNOpYdCca4bImnAiovDL8DY8zN3UNQebu35IQvw9/aRsx9VKSJL1AqzJJUImFRw==} + + '@zag-js/popper@1.39.1': + resolution: {integrity: sha512-h0UMY2dXJNfM3OvMQ9t9LzlmwvpCgjloz2IvU1txY3r32UIy7ve1H70zkKagLtLRxFTuWmhumYUPULPo/6a1DA==} + + '@zag-js/progress@1.39.1': + resolution: {integrity: sha512-1IHyOw8DqPs3YH149Oj7W9a5oEfY5pc9GAVOPGbzYxVK/W8d/NIjVxa565I3J5cDJ0s6z3FrMSXMWUwr1ML4tw==} + + '@zag-js/radio-group@1.39.1': + resolution: {integrity: sha512-+sC9xcAyY/GbY+8HpKlbPgSyOxBLUSB18s6fe6K1wdmyom4PM0nmhLouuxisbFZYHOyfQwAOMo+ainRENB2hzQ==} + + 
'@zag-js/rating-group@1.39.1': + resolution: {integrity: sha512-IfdxWmM+3zpztx/HcE3bWob72sZNb1+BzK4tSySLVyjeqs8OzLDzrCbKqt10DmibnNOvpbjbq4eX4P5hV9YN7Q==} + + '@zag-js/rect-utils@1.39.1': + resolution: {integrity: sha512-5gJ0PzeUme76xTWG+4XythWgmGgDKV4XAxEUaB3KKDtXgjDHwtu7PwKLIzFtlaaSf/U23PY+RNVBVCYg1GmZog==} + + '@zag-js/remove-scroll@1.39.1': + resolution: {integrity: sha512-uZfPR3Gl9sQFo+tJ7kbuwsBhw+RIZwWFnMDgrz5LIwSNGN6hsyC4HGOxe29clkWQ2X2AjqqmEMETwgX7Jg+wxA==} + + '@zag-js/scroll-snap@1.39.1': + resolution: {integrity: sha512-AzCc8MAAVqkiK5Y0cJZ24OIBZDQrUmEexACMuR6M5yZmlcEbS0EA/d6Wq+LSR1JMVTD4B+UwcMj1D3vJQ90ZTw==} + + '@zag-js/slider@1.39.1': + resolution: {integrity: sha512-OEA9R7Ly5cw+6ANofnMpuHH3rAo8gZEnxy7iEwePu11pq2RCnt8DSj2V+uqU+dTq15Uup1LSzRgJfTnAC4Z85A==} + + '@zag-js/steps@1.39.1': + resolution: {integrity: sha512-DC6swMpwITTB0DyCSxlpWyPNSUN9ul9jz4N6aAyQ0L1IK/noF/YYTZRAcXNSRzN4iutO/2mFGGbwGq/oVf+gPA==} + + '@zag-js/store@1.39.1': + resolution: {integrity: sha512-zFpwP4lhiBVD9987rwAfZNVa2/f/xx4mhbCE1EEw31zxLAozY2jONeJ3UzPP05VbzKlRHBcvkaXAJQQGegTwFA==} + + '@zag-js/svelte@1.39.1': + resolution: {integrity: sha512-ZOyZjyvjePZdrkNTy5fa92ijeeID9e+3LRGziKGIII3JSEvwfkG/Buf8W84N8VHFxi0G0GcKmgVCDgKHyGQYoQ==} + peerDependencies: + svelte: '>=5' + + '@zag-js/switch@1.39.1': + resolution: {integrity: sha512-ikeQ42c0vyyPLeyW9U0dvcqTV1Ekpx5jZ050R905HGJ2GeWE0uBGuHbMpTG5U6Pwb0a+TMzqAr+jMsquVTCwzg==} + + '@zag-js/tabs@1.39.1': + resolution: {integrity: sha512-P2RThO1gX9SFsNqrAGPsXJxrjn5YqP6MFs9mdExU+tzzZyVjJQADkAmh98C0eEaCb6HKLpJZ/17hrnLDhm1Tig==} + + '@zag-js/tags-input@1.39.1': + resolution: {integrity: sha512-tc0+bd9FiUJwa+wY2hSVVGHLIBC3C3rOZX/4zjchRMs1xgl92c1/tYbytXny7ABB8ZMHveG7MtgDppVF4VkwBg==} + + '@zag-js/toast@1.39.1': + resolution: {integrity: sha512-K7ndEfBTKDds10iQKCQUmin74s6V4BEIypAIyQxs18gQB9TCn5+wff886JAzecIKPY97PDQHDKjYR71yzRC7/g==} + + '@zag-js/toggle-group@1.39.1': + resolution: {integrity: 
sha512-KS4Bo17foMKXVBhQjocRf4GQxMV4pMXclTo14IWjldaHs2HIrNJ0Ar0Ri+vo47BBKBNsXs4HuNvfbMdQj94wEA==} + + '@zag-js/tooltip@1.39.1': + resolution: {integrity: sha512-IsxFj7l8kPciwIyYJWlmQ7mhXocbjXxLj3m9z099slYOF7lApA33/ndY32w9ptrI4/nUh2nldzw6eRfSpVnuOA==} + + '@zag-js/tree-view@1.39.1': + resolution: {integrity: sha512-sm6qUZjO0OaqBqO5s55KU+l5p1wXfUVScoen7BYVoFBuROH7qAZJi8YMclGvnnlyV506i8Hk0qqWnLg0F38jCA==} + + '@zag-js/types@1.39.1': + resolution: {integrity: sha512-w3vVpgxmdJvMDvv19DXTtFI6kJL6TXw//U0Z1BAc3rnDA9orcB9Ryw4uMNvIzFA607CgssyJcWDaQ/M3yAcbJw==} + + '@zag-js/utils@1.39.1': + resolution: {integrity: sha512-9k741cH7L655Ua3tedTkuMblcXVXVgCLTB9svp9oTjA7oatpOpYF4z43kgAQVjyThNXMJ7AvtO4C80ajQLTScg==} + acorn@8.16.0: resolution: {integrity: sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==} engines: {node: '>=0.4.0'} hasBin: true + argparse@2.0.1: + resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + aria-query@5.3.1: resolution: {integrity: sha512-Z/ZeOgVl7bcSYZ/u/rh0fOpvEpq//LZmdbkXyc7syVzjPAhfOa9ebsdTSjEBDU4vs5nC98Kfduj1uFo0qyET3g==} engines: {node: '>= 0.4'} @@ -485,6 +1113,36 @@ packages: resolution: {integrity: sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==} engines: {node: '>= 0.4'} + bail@2.0.2: + resolution: {integrity: sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==} + + bezier-easing@2.1.0: + resolution: {integrity: sha512-gbIqZ/eslnUFC1tjEvtz0sgx+xTK20wDnYMIA27VA04R7w6xxXQPZDbibjA9DTWZRA2CXtwHykkVzlCaAJAZig==} + + bidi-js@1.0.3: + resolution: {integrity: sha512-RKshQI1R3YQ+n9YJz2QQ147P66ELpa1FQEg20Dk8oW9t2KgLbpDLLp9aGZ7y8WHSshDknG0bknqGw5/tyCs5tw==} + + carta-md@4.11.2: + resolution: {integrity: sha512-oqx2wxuEK4xpT1dq23oYTPSdhWKFwGLYqwjeNXPda0q8rnmz+eOygx/oxApUuEhnOtjUNE4f3D23e0f83HSLEQ==} + peerDependencies: + svelte: ^5.0.0 + + ccount@2.0.1: + 
resolution: {integrity: sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==} + + char-regex@1.0.2: + resolution: {integrity: sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==} + engines: {node: '>=10'} + + character-entities-html4@2.1.0: + resolution: {integrity: sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==} + + character-entities-legacy@3.0.0: + resolution: {integrity: sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==} + + character-entities@2.0.2: + resolution: {integrity: sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==} + chokidar@4.0.3: resolution: {integrity: sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==} engines: {node: '>= 14.16.0'} @@ -493,6 +1151,27 @@ packages: resolution: {integrity: sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==} engines: {node: '>=6'} + comma-separated-tokens@2.0.3: + resolution: {integrity: sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==} + + commander@8.3.0: + resolution: {integrity: sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==} + engines: {node: '>= 12'} + + crelt@1.0.6: + resolution: {integrity: sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g==} + + css-tree@3.2.1: + resolution: {integrity: sha512-X7sjQzceUhu1u7Y/ylrRZFU2FS6LRiFVp6rKLPg23y3x3c3DOKAwuXGDp+PAGjh6CSnCjYeAul8pcT8bAl+lSA==} + engines: {node: ^10 || ^12.20.0 || ^14.13.0 || >=15.0.0} + + csstype@3.2.3: + resolution: {integrity: sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==} + + data-urls@7.0.0: + resolution: {integrity: 
sha512-23XHcCF+coGYevirZceTVD7NdJOqVn+49IHyxgszm+JIiHLoB2TkmPtsYkNWT1pvRSGkc35L6NHs0yHkN2SumA==} + engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} + debug@4.4.3: resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==} engines: {node: '>=6.0'} @@ -502,10 +1181,20 @@ packages: supports-color: optional: true + decimal.js@10.6.0: + resolution: {integrity: sha512-YpgQiITW3JXGntzdUmyUR1V812Hn8T1YVXhCu+wO3OpS4eU9l4YdD3qjyiKdV6mvV29zapkMeD390UVEf2lkUg==} + + decode-named-character-reference@1.3.0: + resolution: {integrity: sha512-GtpQYB283KrPp6nRw50q3U9/VfOutZOe103qlN7BPP6Ad27xYnOIWv4lPzo8HCAL+mMZofJ9KEy30fq6MfaK6Q==} + deepmerge@4.3.1: resolution: {integrity: sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==} engines: {node: '>=0.10.0'} + dequal@2.0.3: + resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} + engines: {node: '>=6'} + detect-libc@2.1.2: resolution: {integrity: sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==} engines: {node: '>=8'} @@ -513,21 +1202,66 @@ packages: devalue@5.6.4: resolution: {integrity: sha512-Gp6rDldRsFh/7XuouDbxMH3Mx8GMCcgzIb1pDTvNyn8pZGQ22u+Wa+lGV9dQCltFQ7uVw0MhRyb8XDskNFOReA==} + devlop@1.1.0: + resolution: {integrity: sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==} + + diff@5.2.2: + resolution: {integrity: sha512-vtcDfH3TOjP8UekytvnHH1o1P4FcUdt4eQ1Y+Abap1tk/OB2MWQvcwS2ClCd1zuIhc3JKOx6p3kod8Vfys3E+A==} + engines: {node: '>=0.3.1'} + + dompurify@3.2.7: + resolution: {integrity: sha512-WhL/YuveyGXJaerVlMYGWhvQswa7myDG17P7Vu65EWC05o8vfeNbvNf4d/BOvH99+ZW+LlQsc1GDKMa1vNK6dw==} + + dompurify@3.4.1: + resolution: {integrity: sha512-JahakDAIg1gyOm7dlgWSDjV4n7Ip2PKR55NIT6jrMfIgLFgWo81vdr1/QGqWtFNRqXP9UV71oVePtjqS2ebnPw==} + + emojilib@2.4.0: + resolution: {integrity: 
sha512-5U0rVMU5Y2n2+ykNLQqMoqklN9ICBT/KsvC1Gz6vqHbz2AXXGkG+Pm5rMWk/8Vjrr/mY9985Hi8DYzn1F09Nyw==} + + emoticon@4.1.0: + resolution: {integrity: sha512-VWZfnxqwNcc51hIy/sbOdEem6D+cVtpPzEEtVAFdaas30+1dgkyaOQ4sQ6Bp0tOMqWO1v+HQfYaoodOkdhK6SQ==} + enhanced-resolve@5.20.1: resolution: {integrity: sha512-Qohcme7V1inbAfvjItgw0EaxVX5q2rdVEZHRBrEQdRZTssLDGsL8Lwrznl8oQ/6kuTJONLaDcGjkNP247XEhcA==} engines: {node: '>=10.13.0'} + entities@4.5.0: + resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==} + engines: {node: '>=0.12'} + + entities@6.0.1: + resolution: {integrity: sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==} + engines: {node: '>=0.12'} + + entities@8.0.0: + resolution: {integrity: sha512-zwfzJecQ/Uej6tusMqwAqU/6KL2XaB2VZ2Jg54Je6ahNBGNH6Ek6g3jjNCF0fG9EWQKGZNddNjU5F1ZQn/sBnA==} + engines: {node: '>=20.19.0'} + esbuild@0.25.12: resolution: {integrity: sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==} engines: {node: '>=18'} hasBin: true + escape-string-regexp@4.0.0: + resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} + engines: {node: '>=10'} + + escape-string-regexp@5.0.0: + resolution: {integrity: sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==} + engines: {node: '>=12'} + esm-env@1.2.2: resolution: {integrity: sha512-Epxrv+Nr/CaL4ZcFGPJIYLWFom+YeV1DqMLHJoEd9SYRxNbaFruBwfEX/kkHUJf55j2+TUbmDcmuilbP1TmXHA==} esrap@2.2.4: resolution: {integrity: sha512-suICpxAmZ9A8bzJjEl/+rLJiDKC0X4gYWUxT6URAWBLvlXmtbZd5ySMu/N2ZGEtMCAmflUDPSehrP9BQcsGcSg==} + extend@3.0.2: + resolution: {integrity: sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==} + + fault@2.0.1: + resolution: {integrity: 
sha512-WtySTkS4OKev5JtpHXnib4Gxiurzh5NCGvWrFaZ34m6JehfTUhKZvn9njTfw48t6JumVQOmrKqpmGcdwxnhqBQ==} + fdir@6.5.0: resolution: {integrity: sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==} engines: {node: '>=12.0.0'} @@ -537,21 +1271,109 @@ packages: picomatch: optional: true + format@0.2.2: + resolution: {integrity: sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==} + engines: {node: '>=0.4.x'} + fsevents@2.3.3: resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} os: [darwin] + github-markdown-css@5.9.0: + resolution: {integrity: sha512-tmT5sY+zvg2302XLYEfH2mtkViIM1SWf2nvYoF5N1ZsO0V6B2qZTiw3GOzw4vpjLygK/KG35qRlPFweHqfzz5w==} + engines: {node: '>=10'} + + github-slugger@2.0.0: + resolution: {integrity: sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw==} + graceful-fs@4.2.11: resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} + hast-util-from-dom@5.0.1: + resolution: {integrity: sha512-N+LqofjR2zuzTjCPzyDUdSshy4Ma6li7p/c3pA78uTwzFgENbgbUrm2ugwsOdcjI1muO+o6Dgzp9p8WHtn/39Q==} + + hast-util-from-html-isomorphic@2.0.0: + resolution: {integrity: sha512-zJfpXq44yff2hmE0XmwEOzdWin5xwH+QIhMLOScpX91e/NSGPsAzNCvLQDIEPyO2TXi+lBmU6hjLIhV8MwP2kw==} + + hast-util-from-html@2.0.3: + resolution: {integrity: sha512-CUSRHXyKjzHov8yKsQjGOElXy/3EKpyX56ELnkHH34vDVw1N1XSQ1ZcAvTyAPtGqLTuKP/uxM+aLkSPqF/EtMw==} + + hast-util-from-parse5@8.0.3: + resolution: {integrity: sha512-3kxEVkEKt0zvcZ3hCRYI8rqrgwtlIOFMWkbclACvjlDw8Li9S2hk/d51OI0nr/gIpdMHNepwgOKqZ/sy0Clpyg==} + + hast-util-heading-rank@3.0.0: + resolution: {integrity: sha512-EJKb8oMUXVHcWZTDepnr+WNbfnXKFNf9duMesmr4S8SXTJBJ9M4Yok08pu9vxdJwdlGRhVumk9mEhkEvKGifwA==} + + hast-util-is-element@3.0.0: + resolution: {integrity: 
sha512-Val9mnv2IWpLbNPqc/pUem+a7Ipj2aHacCwgNfTiK0vJKl0LF+4Ba4+v1oPHFpf3bLYmreq0/l3Gud9S5OH42g==} + + hast-util-parse-selector@4.0.0: + resolution: {integrity: sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==} + + hast-util-to-html@9.0.5: + resolution: {integrity: sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw==} + + hast-util-to-string@3.0.1: + resolution: {integrity: sha512-XelQVTDWvqcl3axRfI0xSeoVKzyIFPwsAGSLIsKdJKQMXDYJS4WYrBNF/8J7RdhIcFI2BOHgAifggsvsxp/3+A==} + + hast-util-to-text@4.0.2: + resolution: {integrity: sha512-KK6y/BN8lbaq654j7JgBydev7wuNMcID54lkRav1P0CaE1e47P72AWWPiGKXTJU271ooYzcvTAn/Zt0REnvc7A==} + + hast-util-whitespace@3.0.0: + resolution: {integrity: sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==} + + hastscript@9.0.1: + resolution: {integrity: sha512-g7df9rMFX/SPi34tyGCyUBREQoKkapwdY/T04Qn9TDWfHhAYt4/I0gMVirzK5wEzeUqIjEB+LXC/ypb7Aqno5w==} + + highlight.js@11.8.0: + resolution: {integrity: sha512-MedQhoqVdr0U6SSnWPzfiadUcDHfN/Wzq25AkXiQv9oiOO/sG0S7XkvpFIqWBl9Yq1UYyYOOVORs5UW2XlPyzg==} + engines: {node: '>=12.0.0'} + + html-encoding-sniffer@6.0.0: + resolution: {integrity: sha512-CV9TW3Y3f8/wT0BRFc1/KAVQ3TUHiXmaAb6VW9vtiMFf7SLoMd1PdAc4W3KFOFETBJUb90KatHqlsZMWV+R9Gg==} + engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} + + html-void-elements@3.0.0: + resolution: {integrity: sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==} + + iconify-icon@1.0.8: + resolution: {integrity: sha512-jvbUKHXf8EnGGArmhlP2IG8VqQLFFyTvTqb9LVL2TKTh7/eCCD1o2HHE9thpbJJb6B8hzhcFb6rOKhvo7reNKA==} + + is-plain-obj@4.1.0: + resolution: {integrity: sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==} + engines: {node: '>=12'} + + is-potential-custom-element-name@1.0.1: + resolution: {integrity: 
sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==} + is-reference@3.0.3: resolution: {integrity: sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw==} + isomorphic-dompurify@3.10.0: + resolution: {integrity: sha512-Gj2duy4dACsP/FLPvwJ3+MXTlGtOo+O4yfpA0jdxuz/sZlbZzazGzScajOHRwH7PCy4j3bh5ibLGJY4/Rb5kGQ==} + engines: {node: ^20.19.0 || ^22.13.0 || >=24.0.0} + jiti@2.6.1: resolution: {integrity: sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==} hasBin: true + jsbi@4.3.2: + resolution: {integrity: sha512-9fqMSQbhJykSeii05nxKl4m6Eqn2P6rOlYiS+C5Dr/HPIU/7yZxu5qzbs40tgaFORiw2Amd0mirjxatXYMkIew==} + + jsdom@29.0.2: + resolution: {integrity: sha512-9VnGEBosc/ZpwyOsJBCQ/3I5p7Q5ngOY14a9bf5btenAORmZfDse1ZEheMiWcJ3h81+Fv7HmJFdS0szo/waF2w==} + engines: {node: ^20.19.0 || ^22.13.0 || >=24.0.0} + peerDependencies: + canvas: ^3.0.0 + peerDependenciesMeta: + canvas: + optional: true + + katex@0.16.45: + resolution: {integrity: sha512-pQpZbdBu7wCTmQUh7ufPmLr0pFoObnGUoL/yhtwJDgmmQpbkg/0HSVti25Fu4rmd1oCR6NGWe9vqTWuWv3GcNA==} + hasBin: true + kleur@4.1.5: resolution: {integrity: sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==} engines: {node: '>=6'} @@ -630,12 +1452,175 @@ packages: resolution: {integrity: sha512-NXYBzinNrblfraPGyrbPoD19C1h9lfI/1mzgWYvXUTe414Gz/X1FD2XBZSZM7rRTrMA8JL3OtAaGifrIKhQ5yQ==} engines: {node: '>= 12.0.0'} + linkify-it@5.0.0: + resolution: {integrity: sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==} + + linkifyjs@4.3.2: + resolution: {integrity: sha512-NT1CJtq3hHIreOianA8aSXn6Cw0JzYOuDQbOrSPe7gqFnCpKP++MQe3ODgO3oh2GJFORkAAdqredOa60z63GbA==} + locate-character@3.0.0: resolution: {integrity: sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA==} + longest-streak@3.1.0: + resolution: {integrity: 
sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==} + + lowlight@2.9.0: + resolution: {integrity: sha512-OpcaUTCLmHuVuBcyNckKfH5B0oA4JUavb/M/8n9iAvanJYNQkrVm4pvyX0SUaqkBG4dnWHKt7p50B3ngAG2Rfw==} + + lru-cache@11.3.5: + resolution: {integrity: sha512-NxVFwLAnrd9i7KUBxC4DrUhmgjzOs+1Qm50D3oF1/oL+r1NpZ4gA7xvG0/zJ8evR7zIKn4vLf7qTNduWFtCrRw==} + engines: {node: 20 || >=22} + magic-string@0.30.21: resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==} + markdown-it@14.1.1: + resolution: {integrity: sha512-BuU2qnTti9YKgK5N+IeMubp14ZUKUUw7yeJbkjtosvHiP0AZ5c8IAgEMk79D0eC8F23r4Ac/q8cAIFdm2FtyoA==} + hasBin: true + + markdown-table@3.0.4: + resolution: {integrity: sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==} + + marked@14.0.0: + resolution: {integrity: sha512-uIj4+faQ+MgHgwUW1l2PsPglZLOLOT1uErt06dAPtx2kjteLAkbsd/0FiYg/MGS+i7ZKLb7w2WClxHkzOOuryQ==} + engines: {node: '>= 18'} + hasBin: true + + mdast-util-find-and-replace@3.0.2: + resolution: {integrity: sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg==} + + mdast-util-from-markdown@2.0.3: + resolution: {integrity: sha512-W4mAWTvSlKvf8L6J+VN9yLSqQ9AOAAvHuoDAmPkz4dHf553m5gVj2ejadHJhoJmcmxEnOv6Pa8XJhpxE93kb8Q==} + + mdast-util-gfm-autolink-literal@2.0.1: + resolution: {integrity: sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ==} + + mdast-util-gfm-footnote@2.1.0: + resolution: {integrity: sha512-sqpDWlsHn7Ac9GNZQMeUzPQSMzR6Wv0WKRNvQRg0KqHh02fpTz69Qc1QSseNX29bhz1ROIyNyxExfawVKTm1GQ==} + + mdast-util-gfm-strikethrough@2.0.0: + resolution: {integrity: sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==} + + mdast-util-gfm-table@2.0.0: + resolution: {integrity: 
sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==} + + mdast-util-gfm-task-list-item@2.0.0: + resolution: {integrity: sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==} + + mdast-util-gfm@3.1.0: + resolution: {integrity: sha512-0ulfdQOM3ysHhCJ1p06l0b0VKlhU0wuQs3thxZQagjcjPrlFRqY215uZGHHJan9GEAXd9MbfPjFJz+qMkVR6zQ==} + + mdast-util-math@3.0.0: + resolution: {integrity: sha512-Tl9GBNeG/AhJnQM221bJR2HPvLOSnLE/T9cJI9tlc6zwQk2nPk/4f0cHkOdEixQPC/j8UtKDdITswvLAy1OZ1w==} + + mdast-util-phrasing@4.1.0: + resolution: {integrity: sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==} + + mdast-util-to-hast@13.2.1: + resolution: {integrity: sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA==} + + mdast-util-to-markdown@2.1.2: + resolution: {integrity: sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==} + + mdast-util-to-string@4.0.0: + resolution: {integrity: sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==} + + mdn-data@2.27.1: + resolution: {integrity: sha512-9Yubnt3e8A0OKwxYSXyhLymGW4sCufcLG6VdiDdUGVkPhpqLxlvP5vl1983gQjJl3tqbrM731mjaZaP68AgosQ==} + + mdurl@2.0.0: + resolution: {integrity: sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==} + + micromark-core-commonmark@2.0.3: + resolution: {integrity: sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==} + + micromark-extension-gfm-autolink-literal@2.1.0: + resolution: {integrity: sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw==} + + micromark-extension-gfm-footnote@2.1.0: + resolution: {integrity: sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw==} + + 
micromark-extension-gfm-strikethrough@2.1.0: + resolution: {integrity: sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw==} + + micromark-extension-gfm-table@2.1.1: + resolution: {integrity: sha512-t2OU/dXXioARrC6yWfJ4hqB7rct14e8f7m0cbI5hUmDyyIlwv5vEtooptH8INkbLzOatzKuVbQmAYcbWoyz6Dg==} + + micromark-extension-gfm-tagfilter@2.0.0: + resolution: {integrity: sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg==} + + micromark-extension-gfm-task-list-item@2.1.0: + resolution: {integrity: sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw==} + + micromark-extension-gfm@3.0.0: + resolution: {integrity: sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w==} + + micromark-extension-math@3.1.0: + resolution: {integrity: sha512-lvEqd+fHjATVs+2v/8kg9i5Q0AP2k85H0WUOwpIVvUML8BapsMvh1XAogmQjOCsLpoKRCVQqEkQBB3NhVBcsOg==} + + micromark-factory-destination@2.0.1: + resolution: {integrity: sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==} + + micromark-factory-label@2.0.1: + resolution: {integrity: sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==} + + micromark-factory-space@2.0.1: + resolution: {integrity: sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==} + + micromark-factory-title@2.0.1: + resolution: {integrity: sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==} + + micromark-factory-whitespace@2.0.1: + resolution: {integrity: sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==} + + micromark-util-character@2.1.1: + resolution: {integrity: sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==} + + micromark-util-chunked@2.0.1: + resolution: 
{integrity: sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==} + + micromark-util-classify-character@2.0.1: + resolution: {integrity: sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==} + + micromark-util-combine-extensions@2.0.1: + resolution: {integrity: sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==} + + micromark-util-decode-numeric-character-reference@2.0.2: + resolution: {integrity: sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==} + + micromark-util-decode-string@2.0.1: + resolution: {integrity: sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==} + + micromark-util-encode@2.0.1: + resolution: {integrity: sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==} + + micromark-util-html-tag-name@2.0.1: + resolution: {integrity: sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==} + + micromark-util-normalize-identifier@2.0.1: + resolution: {integrity: sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==} + + micromark-util-resolve-all@2.0.1: + resolution: {integrity: sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==} + + micromark-util-sanitize-uri@2.0.1: + resolution: {integrity: sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==} + + micromark-util-subtokenize@2.1.0: + resolution: {integrity: sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==} + + micromark-util-symbol@2.0.1: + resolution: {integrity: sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==} + + micromark-util-types@2.0.2: + resolution: {integrity: 
sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==} + + micromark@4.0.2: + resolution: {integrity: sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==} + + monaco-editor@0.55.1: + resolution: {integrity: sha512-jz4x+TJNFHwHtwuV9vA9rMujcZRb0CEilTEwG2rRSpe/A7Jdkuj8xPKttCgOh+v/lkHy7HsZ64oj+q3xoAFl9A==} + mri@1.2.0: resolution: {integrity: sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==} engines: {node: '>=4'} @@ -648,6 +1633,25 @@ packages: engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} hasBin: true + node-emoji@2.2.0: + resolution: {integrity: sha512-Z3lTE9pLaJF47NyMhd4ww1yFTAP8YhYI8SleJiHzM46Fgpm5cnNzSl9XfzFNqbaz+VlJrIj3fXQ4DeN1Rjm6cw==} + engines: {node: '>=18'} + + oniguruma-parser@0.12.2: + resolution: {integrity: sha512-6HVa5oIrgMC6aA6WF6XyyqbhRPJrKR02L20+2+zpDtO5QAzGHAUGw5TKQvwi5vctNnRHkJYmjAhRVQF2EKdTQw==} + + oniguruma-to-es@4.3.6: + resolution: {integrity: sha512-csuQ9x3Yr0cEIs/Zgx/OEt9iBw9vqIunAPQkx19R/fiMq2oGVTgcMqO/V3Ybqefr1TBvosI6jU539ksaBULJyA==} + + orderedmap@2.1.1: + resolution: {integrity: sha512-TvAWxi0nDe1j/rtMcWcIj94+Ffe6n7zhow33h40SKxmsmozs6dz/e+EajymfoFcHd7sxNn8yHM8839uixMOV6g==} + + parse5@7.3.0: + resolution: {integrity: sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==} + + parse5@8.0.1: + resolution: {integrity: sha512-z1e/HMG90obSGeidlli3hj7cbocou0/wa5HacvI3ASx34PecNjNQeaHNo5WIZpWofN9kgkqV1q5YvXe3F0FoPw==} + picocolors@1.1.1: resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} @@ -659,23 +1663,167 @@ packages: resolution: {integrity: sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==} engines: {node: ^10 || ^12 || >=14} + property-information@7.1.0: + resolution: {integrity: 
sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==} + + prosemirror-changeset@2.4.1: + resolution: {integrity: sha512-96WBLhOaYhJ+kPhLg3uW359Tz6I/MfcrQfL4EGv4SrcqKEMC1gmoGrXHecPE8eOwTVCJ4IwgfzM8fFad25wNfw==} + + prosemirror-collab@1.3.1: + resolution: {integrity: sha512-4SnynYR9TTYaQVXd/ieUvsVV4PDMBzrq2xPUWutHivDuOshZXqQ5rGbZM84HEaXKbLdItse7weMGOUdDVcLKEQ==} + + prosemirror-commands@1.7.1: + resolution: {integrity: sha512-rT7qZnQtx5c0/y/KlYaGvtG411S97UaL6gdp6RIZ23DLHanMYLyfGBV5DtSnZdthQql7W+lEVbpSfwtO8T+L2w==} + + prosemirror-dropcursor@1.8.2: + resolution: {integrity: sha512-CCk6Gyx9+Tt2sbYk5NK0nB1ukHi2ryaRgadV/LvyNuO3ena1payM2z6Cg0vO1ebK8cxbzo41ku2DE5Axj1Zuiw==} + + prosemirror-gapcursor@1.4.1: + resolution: {integrity: sha512-pMdYaEnjNMSwl11yjEGtgTmLkR08m/Vl+Jj443167p9eB3HVQKhYCc4gmHVDsLPODfZfjr/MmirsdyZziXbQKw==} + + prosemirror-history@1.5.0: + resolution: {integrity: sha512-zlzTiH01eKA55UAf1MEjtssJeHnGxO0j4K4Dpx+gnmX9n+SHNlDqI2oO1Kv1iPN5B1dm5fsljCfqKF9nFL6HRg==} + + prosemirror-inputrules@1.5.1: + resolution: {integrity: sha512-7wj4uMjKaXWAQ1CDgxNzNtR9AlsuwzHfdFH1ygEHA2KHF2DOEaXl1CJfNPAKCg9qNEh4rum975QLaCiQPyY6Fw==} + + prosemirror-keymap@1.2.3: + resolution: {integrity: sha512-4HucRlpiLd1IPQQXNqeo81BGtkY8Ai5smHhKW9jjPKRc2wQIxksg7Hl1tTI2IfT2B/LgX6bfYvXxEpJl7aKYKw==} + + prosemirror-markdown@1.13.4: + resolution: {integrity: sha512-D98dm4cQ3Hs6EmjK500TdAOew4Z03EV71ajEFiWra3Upr7diytJsjF4mPV2dW+eK5uNectiRj0xFxYI9NLXDbw==} + + prosemirror-menu@1.3.2: + resolution: {integrity: sha512-6VgUJTYod0nMBlCaYJGhXGLu7Gt4AvcwcOq0YfJCY/6Uh+3S7UsWhpy6rJFCBFOmonq1hD8KyWOtZhkppd4YPg==} + + prosemirror-model@1.25.4: + resolution: {integrity: sha512-PIM7E43PBxKce8OQeezAs9j4TP+5yDpZVbuurd1h5phUxEKIu+G2a+EUZzIC5nS1mJktDJWzbqS23n1tsAf5QA==} + + prosemirror-schema-basic@1.2.4: + resolution: {integrity: sha512-ELxP4TlX3yr2v5rM7Sb70SqStq5NvI15c0j9j/gjsrO5vaw+fnnpovCLEGIcpeGfifkuqJwl4fon6b+KdrODYQ==} + + prosemirror-schema-list@1.5.1: + 
resolution: {integrity: sha512-927lFx/uwyQaGwJxLWCZRkjXG0p48KpMj6ueoYiu4JX05GGuGcgzAy62dfiV8eFZftgyBUvLx76RsMe20fJl+Q==} + + prosemirror-state@1.4.4: + resolution: {integrity: sha512-6jiYHH2CIGbCfnxdHbXZ12gySFY/fz/ulZE333G6bPqIZ4F+TXo9ifiR86nAHpWnfoNjOb3o5ESi7J8Uz1jXHw==} + + prosemirror-tables@1.8.5: + resolution: {integrity: sha512-V/0cDCsHKHe/tfWkeCmthNUcEp1IVO3p6vwN8XtwE9PZQLAZJigbw3QoraAdfJPir4NKJtNvOB8oYGKRl+t0Dw==} + + prosemirror-trailing-node@3.0.0: + resolution: {integrity: sha512-xiun5/3q0w5eRnGYfNlW1uU9W6x5MoFKWwq/0TIRgt09lv7Hcser2QYV8t4muXbEr+Fwo0geYn79Xs4GKywrRQ==} + peerDependencies: + prosemirror-model: ^1.22.1 + prosemirror-state: ^1.4.2 + prosemirror-view: ^1.33.8 + + prosemirror-transform@1.12.0: + resolution: {integrity: sha512-GxboyN4AMIsoHNtz5uf2r2Ru551i5hWeCMD6E2Ib4Eogqoub0NflniaBPVQ4MrGE5yZ8JV9tUHg9qcZTTrcN4w==} + + prosemirror-view@1.41.8: + resolution: {integrity: sha512-TnKDdohEatgyZNGCDWIdccOHXhYloJwbwU+phw/a23KBvJIR9lWQWW7WHHK3vBdOLDNuF7TaX98GObUZOWkOnA==} + + proxy-compare@3.0.1: + resolution: {integrity: sha512-V9plBAt3qjMlS1+nC8771KNf6oJ12gExvaxnNzN/9yVRLdTv/lc+oJlnSzrdYDAvBfTStPCoiaCOTmTs0adv7Q==} + + punycode.js@2.3.1: + resolution: {integrity: sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==} + engines: {node: '>=6'} + + punycode@2.3.1: + resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} + engines: {node: '>=6'} + readdirp@4.1.2: resolution: {integrity: sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==} engines: {node: '>= 14.18.0'} + regex-recursion@6.0.2: + resolution: {integrity: sha512-0YCaSCq2VRIebiaUviZNs0cBz1kg5kVS2UKUfNIx8YVs1cN3AV7NTctO5FOKBA+UT2BPJIWZauYHPqJODG50cg==} + + regex-utilities@2.3.0: + resolution: {integrity: sha512-8VhliFJAWRaUiVvREIiW2NXXTmHs4vMNnSzuJVhscgmGav3g9VDxLrQndI3dZZVVdp0ZO/5v0xmX516/7M9cng==} + + regex@6.1.0: + resolution: {integrity: 
sha512-6VwtthbV4o/7+OaAF9I5L5V3llLEsoPyq9P1JVXkedTP33c7MfCG0/5NOPcSJn0TzXcG9YUrR0gQSWioew3LDg==} + + rehype-autolink-headings@7.1.0: + resolution: {integrity: sha512-rItO/pSdvnvsP4QRB1pmPiNHUskikqtPojZKJPPPAVx9Hj8i8TwMBhofrrAYRhYOOBZH9tgmG5lPqDLuIWPWmw==} + + rehype-katex@7.0.1: + resolution: {integrity: sha512-OiM2wrZ/wuhKkigASodFoo8wimG3H12LWQaH8qSPVJn9apWKFSH3YOCtbKpBorTVw/eI7cuT21XBbvwEswbIOA==} + + rehype-parse@9.0.1: + resolution: {integrity: sha512-ksCzCD0Fgfh7trPDxr2rSylbwq9iYDkSn8TCDmEJ49ljEUBxDVCzCHv7QNzZOfODanX4+bWQ4WZqLCRWYLfhag==} + + rehype-slug@6.0.0: + resolution: {integrity: sha512-lWyvf/jwu+oS5+hL5eClVd3hNdmwM1kAC0BUvEGD19pajQMIzcNUd/k9GsfQ+FfECvX+JE+e9/btsKH0EjJT6A==} + + rehype-stringify@10.0.1: + resolution: {integrity: sha512-k9ecfXHmIPuFVI61B9DeLPN0qFHfawM6RsuX48hoqlaKSF61RskNjSm1lI8PhBEM0MRdLxVVm4WmTqJQccH9mA==} + + remark-emoji@5.0.2: + resolution: {integrity: sha512-IyIqGELcyK5AVdLFafoiNww+Eaw/F+rGrNSXoKucjo95uL267zrddgxGM83GN1wFIb68pyDuAsY3m5t2Cav1pQ==} + engines: {node: '>=18'} + + remark-gfm@4.0.1: + resolution: {integrity: sha512-1quofZ2RQ9EWdeN34S79+KExV1764+wCUGop5CPL1WGdD0ocPpu91lzPGbwWMECpEpd42kJGQwzRfyov9j4yNg==} + + remark-math@6.0.0: + resolution: {integrity: sha512-MMqgnP74Igy+S3WwnhQ7kqGlEerTETXMvJhrUzDikVZ2/uogJCb+WHUg97hK9/jcfc0dkD73s3LN8zU49cTEtA==} + + remark-parse@11.0.0: + resolution: {integrity: sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==} + + remark-rehype@11.1.2: + resolution: {integrity: sha512-Dh7l57ianaEoIpzbp0PC9UKAdCSVklD8E5Rpw7ETfbTl3FqcOOgq5q2LVDhgGCkaBv7p24JXikPdvhhmHvKMsw==} + + remark-stringify@11.0.0: + resolution: {integrity: sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==} + + require-from-string@2.0.2: + resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} + engines: {node: '>=0.10.0'} + rollup@4.60.1: resolution: {integrity: 
sha512-VmtB2rFU/GroZ4oL8+ZqXgSA38O6GR8KSIvWmEFv63pQ0G6KaBH9s07PO8XTXP4vI+3UJUEypOfjkGfmSBBR0w==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true + rope-sequence@1.3.4: + resolution: {integrity: sha512-UT5EDe2cu2E/6O4igUr5PSFs23nvvukicWHx6GnOPlHAiiYbzNuCRQCuiUdHJQcqKalLKlrYJnjY0ySGsXNQXQ==} + sade@1.8.1: resolution: {integrity: sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==} engines: {node: '>=6'} + saxes@6.0.0: + resolution: {integrity: sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==} + engines: {node: '>=v12.22.7'} + + semver@7.7.4: + resolution: {integrity: sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==} + engines: {node: '>=10'} + hasBin: true + + shiki@3.23.0: + resolution: {integrity: sha512-55Dj73uq9ZXL5zyeRPzHQsK7Nbyt6Y10k5s7OjuFZGMhpp4r/rsLBH0o/0fstIzX1Lep9VxefWljK/SKCzygIA==} + + skin-tone@2.0.0: + resolution: {integrity: sha512-kUMbT1oBJCpgrnKoSr0o6wPtvRWT9W9UKvGLwfJYO2WuahZRHOpEyL1ckyMGgMWh0UdpmaoFqKKD29WTomNEGA==} + engines: {node: '>=8'} + source-map-js@1.2.1: resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} engines: {node: '>=0.10.0'} + space-separated-tokens@2.0.2: + resolution: {integrity: sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==} + + stringify-entities@4.0.4: + resolution: {integrity: sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==} + svelte-check@4.4.6: resolution: {integrity: sha512-kP1zG81EWaFe9ZyTv4ZXv44Csi6Pkdpb7S3oj6m+K2ec/IcDg/a8LsFsnVLqm2nxtkSwsd5xPj/qFkTBgXHXjg==} engines: {node: '>= 18.0.0'} @@ -688,6 +1836,9 @@ packages: resolution: {integrity: sha512-QjvU7EFemf6mRzdMGlAFttMWtAAVXrax61SZYHdkD6yoVGQ89VeyKfZD4H1JrV1WLmJBxWhFch9H6ig/87VGjw==} engines: {node: '>=18'} + symbol-tree@3.2.4: + resolution: {integrity: 
sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==} + tailwindcss@4.2.2: resolution: {integrity: sha512-KWBIxs1Xb6NoLdMVqhbhgwZf2PGBpPEiwOqgI4pFIYbNTfBXiKYyWoTsXgBQ9WFg/OlhnvHaY+AEpW7wSmFo2Q==} @@ -699,14 +1850,93 @@ packages: resolution: {integrity: sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==} engines: {node: '>=12.0.0'} + tippy.js@6.3.7: + resolution: {integrity: sha512-E1d3oP2emgJ9dRQZdf3Kkn0qJgI6ZLpyS5z6ZkY1DF3kaQaBsGZsndEpHwx+eC+tYM41HaSNvNtLx8tU57FzTQ==} + + tldts-core@7.0.28: + resolution: {integrity: sha512-7W5Efjhsc3chVdFhqtaU0KtK32J37Zcr9RKtID54nG+tIpcY79CQK/veYPODxtD/LJ4Lue66jvrQzIX2Z2/pUQ==} + + tldts@7.0.28: + resolution: {integrity: sha512-+Zg3vWhRUv8B1maGSTFdev9mjoo8Etn2Ayfs4cnjlD3CsGkxXX4QyW3j2WJ0wdjYcYmy7Lx2RDsZMhgCWafKIw==} + hasBin: true + + tough-cookie@6.0.1: + resolution: {integrity: sha512-LktZQb3IeoUWB9lqR5EWTHgW/VTITCXg4D21M+lvybRVdylLrRMnqaIONLVb5mav8vM19m44HIcGq4qASeu2Qw==} + engines: {node: '>=16'} + + tr46@6.0.0: + resolution: {integrity: sha512-bLVMLPtstlZ4iMQHpFHTR7GAGj2jxi8Dg0s2h2MafAE4uSWF98FC/3MomU51iQAMf8/qDUbKWf5GxuvvVcXEhw==} + engines: {node: '>=20'} + + trim-lines@3.0.1: + resolution: {integrity: sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==} + + trough@2.2.0: + resolution: {integrity: sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==} + + tslib@2.8.1: + resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} + typescript@5.9.3: resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==} engines: {node: '>=14.17'} hasBin: true + uc.micro@2.1.0: + resolution: {integrity: sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==} + undici-types@7.16.0: resolution: {integrity: 
sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==} + undici@7.25.0: + resolution: {integrity: sha512-xXnp4kTyor2Zq+J1FfPI6Eq3ew5h6Vl0F/8d9XU5zZQf1tX9s2Su1/3PiMmUANFULpmksxkClamIZcaUqryHsQ==} + engines: {node: '>=20.18.1'} + + unicode-emoji-modifier-base@1.0.0: + resolution: {integrity: sha512-yLSH4py7oFH3oG/9K+XWrz1pSi3dfUrWEnInbxMfArOfc1+33BlGPQtLsOYwvdMy11AwUBetYuaRxSPqgkq+8g==} + engines: {node: '>=4'} + + unified@11.0.5: + resolution: {integrity: sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==} + + unist-util-find-after@5.0.0: + resolution: {integrity: sha512-amQa0Ep2m6hE2g72AugUItjbuM8X8cGQnFoHk0pGfrFeT9GZhzN5SW8nRsiGKK7Aif4CrACPENkA6P/Lw6fHGQ==} + + unist-util-is@6.0.1: + resolution: {integrity: sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g==} + + unist-util-position@5.0.0: + resolution: {integrity: sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==} + + unist-util-remove-position@5.0.0: + resolution: {integrity: sha512-Hp5Kh3wLxv0PHj9m2yZhhLt58KzPtEYKQQ4yxfYFEO7EvHwzyDYnduhHnY1mDxoqr7VUwVuHXk9RXKIiYS1N8Q==} + + unist-util-stringify-position@4.0.0: + resolution: {integrity: sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==} + + unist-util-visit-parents@6.0.2: + resolution: {integrity: sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ==} + + unist-util-visit@5.1.0: + resolution: {integrity: sha512-m+vIdyeCOpdr/QeQCu2EzxX/ohgS8KbnPDgFni4dQsfSCtpz8UqDyY5GjRru8PDKuYn7Fq19j1CQ+nJSsGKOzg==} + + uuid@11.1.0: + resolution: {integrity: sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==} + hasBin: true + + uuid@13.0.0: + resolution: {integrity: sha512-XQegIaBTVUjSHliKqcnFqYypAd4S+WCYt5NIeRs6w/UAry7z8Y9j5ZwRRL4kzq9U3sD6v+85er9FvkEaBpji2w==} + hasBin: 
true + + vfile-location@5.0.3: + resolution: {integrity: sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg==} + + vfile-message@4.0.3: + resolution: {integrity: sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==} + + vfile@6.0.3: + resolution: {integrity: sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==} + vite@6.4.1: resolution: {integrity: sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==} engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} @@ -755,11 +1985,131 @@ packages: vite: optional: true + w3c-keyname@2.2.8: + resolution: {integrity: sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==} + + w3c-xmlserializer@5.0.0: + resolution: {integrity: sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==} + engines: {node: '>=18'} + + web-namespaces@2.0.1: + resolution: {integrity: sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==} + + webidl-conversions@8.0.1: + resolution: {integrity: sha512-BMhLD/Sw+GbJC21C/UgyaZX41nPt8bUTg+jWyDeg7e7YN4xOM05YPSIXceACnXVtqyEw/LMClUQMtMZ+PGGpqQ==} + engines: {node: '>=20'} + + whatwg-mimetype@5.0.0: + resolution: {integrity: sha512-sXcNcHOC51uPGF0P/D4NVtrkjSU2fNsm9iog4ZvZJsL3rjoDAzXZhkm2MWt1y+PUdggKAYVoMAIYcs78wJ51Cw==} + engines: {node: '>=20'} + + whatwg-url@16.0.1: + resolution: {integrity: sha512-1to4zXBxmXHV3IiSSEInrreIlu02vUOvrhxJJH5vcxYTBDAx51cqZiKdyTxlecdKNSjj8EcxGBxNf6Vg+945gw==} + engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} + + xml-name-validator@5.0.0: + resolution: {integrity: sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg==} + engines: {node: '>=18'} + + xmlchars@2.2.0: + resolution: {integrity: 
sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==} + zimmerframe@1.1.4: resolution: {integrity: sha512-B58NGBEoc8Y9MWWCQGl/gq9xBCe4IiKM0a2x7GZdQKOW5Exr8S1W24J6OgM1njK8xCRGvAJIL/MxXHf6SkmQKQ==} + zwitch@2.0.4: + resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==} + snapshots: + '@asamuzakjp/css-color@5.1.11': + dependencies: + '@asamuzakjp/generational-cache': 1.0.1 + '@csstools/css-calc': 3.2.0(@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0))(@csstools/css-tokenizer@4.0.0) + '@csstools/css-color-parser': 4.1.0(@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0))(@csstools/css-tokenizer@4.0.0) + '@csstools/css-parser-algorithms': 4.0.0(@csstools/css-tokenizer@4.0.0) + '@csstools/css-tokenizer': 4.0.0 + + '@asamuzakjp/dom-selector@7.1.1': + dependencies: + '@asamuzakjp/generational-cache': 1.0.1 + '@asamuzakjp/nwsapi': 2.3.9 + bidi-js: 1.0.3 + css-tree: 3.2.1 + is-potential-custom-element-name: 1.0.1 + + '@asamuzakjp/generational-cache@1.0.1': {} + + '@asamuzakjp/nwsapi@2.3.9': {} + + '@bramus/specificity@2.4.2': + dependencies: + css-tree: 3.2.1 + + '@cartamd/plugin-anchor@2.2.0(carta-md@4.11.2(svelte@5.55.1))': + dependencies: + carta-md: 4.11.2(svelte@5.55.1) + rehype-autolink-headings: 7.1.0 + rehype-slug: 6.0.0 + + '@cartamd/plugin-attachment@4.2.0(carta-md@4.11.2(svelte@5.55.1))': + dependencies: + carta-md: 4.11.2(svelte@5.55.1) + + '@cartamd/plugin-code@4.2.0(carta-md@4.11.2(svelte@5.55.1))': + dependencies: + '@shikijs/rehype': 3.23.0 + carta-md: 4.11.2(svelte@5.55.1) + unified: 11.0.5 + + '@cartamd/plugin-component@1.1.1(carta-md@4.11.2(svelte@5.55.1))(unified@11.0.5)': + dependencies: + carta-md: 4.11.2(svelte@5.55.1) + esm-env: 1.2.2 + rehype-parse: 9.0.1 + unified: 11.0.5 + unist-util-visit: 5.1.0 + + '@cartamd/plugin-emoji@4.3.0(carta-md@4.11.2(svelte@5.55.1))': + dependencies: + bezier-easing: 2.1.0 + 
carta-md: 4.11.2(svelte@5.55.1) + node-emoji: 2.2.0 + remark-emoji: 5.0.2 + + '@cartamd/plugin-math@4.3.1(carta-md@4.11.2(svelte@5.55.1))(svelte@5.55.1)': + dependencies: + carta-md: 4.11.2(svelte@5.55.1) + rehype-katex: 7.0.1 + remark-math: 6.0.0 + svelte: 5.55.1 + transitivePeerDependencies: + - supports-color + + '@csstools/color-helpers@6.0.2': {} + + '@csstools/css-calc@3.2.0(@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0))(@csstools/css-tokenizer@4.0.0)': + dependencies: + '@csstools/css-parser-algorithms': 4.0.0(@csstools/css-tokenizer@4.0.0) + '@csstools/css-tokenizer': 4.0.0 + + '@csstools/css-color-parser@4.1.0(@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0))(@csstools/css-tokenizer@4.0.0)': + dependencies: + '@csstools/color-helpers': 6.0.2 + '@csstools/css-calc': 3.2.0(@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0))(@csstools/css-tokenizer@4.0.0) + '@csstools/css-parser-algorithms': 4.0.0(@csstools/css-tokenizer@4.0.0) + '@csstools/css-tokenizer': 4.0.0 + + '@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0)': + dependencies: + '@csstools/css-tokenizer': 4.0.0 + + '@csstools/css-syntax-patches-for-csstree@1.1.3(css-tree@3.2.1)': + optionalDependencies: + css-tree: 3.2.1 + + '@csstools/css-tokenizer@4.0.0': {} + '@esbuild/aix-ppc64@0.25.12': optional: true @@ -838,6 +2188,45 @@ snapshots: '@esbuild/win32-x64@0.25.12': optional: true + '@exodus/bytes@1.15.0': {} + + '@floating-ui/core@1.7.5': + dependencies: + '@floating-ui/utils': 0.2.11 + + '@floating-ui/dom@1.7.6': + dependencies: + '@floating-ui/core': 1.7.5 + '@floating-ui/utils': 0.2.11 + + '@floating-ui/utils@0.2.11': {} + + '@friendofsvelte/tipex@0.1.1(highlight.js@11.8.0)(svelte@5.55.1)': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + '@tiptap/extension-code-block': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2) + '@tiptap/extension-code-block-lowlight': 
2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/extension-code-block@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2)(highlight.js@11.8.0)(lowlight@2.9.0) + '@tiptap/extension-floating-menu': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2) + '@tiptap/extension-image': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2)) + '@tiptap/extension-link': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2) + '@tiptap/extension-placeholder': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2) + '@tiptap/extension-task-item': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2) + '@tiptap/extension-task-list': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2)) + '@tiptap/extension-underline': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2)) + '@tiptap/pm': 2.27.2 + '@tiptap/starter-kit': 2.27.2 + iconify-icon: 1.0.8 + lowlight: 2.9.0 + svelte: 5.55.1 + transitivePeerDependencies: + - highlight.js + + '@iconify/types@2.0.0': {} + + '@internationalized/date@3.12.0': + dependencies: + '@swc/helpers': 0.5.21 + '@jridgewell/gen-mapping@0.3.13': dependencies: '@jridgewell/sourcemap-codec': 1.5.5 @@ -857,6 +2246,14 @@ snapshots: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.5.5 + '@js-temporal/polyfill@0.5.1': + dependencies: + jsbi: 4.3.2 + + '@popperjs/core@2.11.8': {} + + '@remirror/core-constants@3.0.0': {} + '@rollup/rollup-android-arm-eabi@4.60.1': optional: true @@ -932,6 +2329,177 @@ snapshots: '@rollup/rollup-win32-x64-msvc@4.60.1': optional: true + '@sentry-internal/browser-utils@10.49.0': + dependencies: + '@sentry/core': 10.49.0 + + '@sentry-internal/feedback@10.49.0': + dependencies: + '@sentry/core': 10.49.0 + + '@sentry-internal/replay-canvas@10.49.0': + dependencies: + '@sentry-internal/replay': 10.49.0 + '@sentry/core': 10.49.0 + + '@sentry-internal/replay@10.49.0': + dependencies: + '@sentry-internal/browser-utils': 10.49.0 + '@sentry/core': 10.49.0 + + 
'@sentry/browser@10.49.0': + dependencies: + '@sentry-internal/browser-utils': 10.49.0 + '@sentry-internal/feedback': 10.49.0 + '@sentry-internal/replay': 10.49.0 + '@sentry-internal/replay-canvas': 10.49.0 + '@sentry/core': 10.49.0 + + '@sentry/core@10.49.0': {} + + '@sentry/svelte@10.49.0(svelte@5.55.1)': + dependencies: + '@sentry/browser': 10.49.0 + '@sentry/core': 10.49.0 + magic-string: 0.30.21 + svelte: 5.55.1 + + '@shikijs/core@3.23.0': + dependencies: + '@shikijs/types': 3.23.0 + '@shikijs/vscode-textmate': 10.0.2 + '@types/hast': 3.0.4 + hast-util-to-html: 9.0.5 + + '@shikijs/engine-javascript@3.23.0': + dependencies: + '@shikijs/types': 3.23.0 + '@shikijs/vscode-textmate': 10.0.2 + oniguruma-to-es: 4.3.6 + + '@shikijs/engine-oniguruma@3.23.0': + dependencies: + '@shikijs/types': 3.23.0 + '@shikijs/vscode-textmate': 10.0.2 + + '@shikijs/langs@3.23.0': + dependencies: + '@shikijs/types': 3.23.0 + + '@shikijs/rehype@3.23.0': + dependencies: + '@shikijs/types': 3.23.0 + '@types/hast': 3.0.4 + hast-util-to-string: 3.0.1 + shiki: 3.23.0 + unified: 11.0.5 + unist-util-visit: 5.1.0 + + '@shikijs/themes@3.23.0': + dependencies: + '@shikijs/types': 3.23.0 + + '@shikijs/types@3.23.0': + dependencies: + '@shikijs/vscode-textmate': 10.0.2 + '@types/hast': 3.0.4 + + '@shikijs/vscode-textmate@10.0.2': {} + + '@sindresorhus/is@4.6.0': {} + + '@skeletonlabs/skeleton-common@4.15.2': {} + + '@skeletonlabs/skeleton-svelte@4.15.2(svelte@5.55.1)': + dependencies: + '@internationalized/date': 3.12.0 + '@skeletonlabs/skeleton-common': 4.15.2 + '@zag-js/accordion': 1.39.1 + '@zag-js/avatar': 1.39.1 + '@zag-js/carousel': 1.39.1 + '@zag-js/collapsible': 1.39.1 + '@zag-js/collection': 1.39.1 + '@zag-js/combobox': 1.39.1 + '@zag-js/date-picker': 1.39.1(@internationalized/date@3.12.0) + '@zag-js/dialog': 1.39.1 + '@zag-js/file-upload': 1.39.1 + '@zag-js/floating-panel': 1.39.1 + '@zag-js/listbox': 1.39.1 + '@zag-js/menu': 1.39.1 + '@zag-js/pagination': 1.39.1 + '@zag-js/popover': 
1.39.1 + '@zag-js/progress': 1.39.1 + '@zag-js/radio-group': 1.39.1 + '@zag-js/rating-group': 1.39.1 + '@zag-js/slider': 1.39.1 + '@zag-js/steps': 1.39.1 + '@zag-js/svelte': 1.39.1(svelte@5.55.1) + '@zag-js/switch': 1.39.1 + '@zag-js/tabs': 1.39.1 + '@zag-js/tags-input': 1.39.1 + '@zag-js/toast': 1.39.1 + '@zag-js/toggle-group': 1.39.1 + '@zag-js/tooltip': 1.39.1 + '@zag-js/tree-view': 1.39.1 + svelte: 5.55.1 + + '@skeletonlabs/skeleton@4.15.2(tailwindcss@4.2.2)': + dependencies: + tailwindcss: 4.2.2 + + '@svar-ui/core-locales@2.5.1': {} + + '@svar-ui/grid-data-provider@2.6.2': + dependencies: + '@svar-ui/lib-data-provider': 1.7.2 + + '@svar-ui/grid-locales@2.6.2': {} + + '@svar-ui/grid-store@2.6.2': + dependencies: + '@svar-ui/lib-dom': 0.12.1 + '@svar-ui/lib-state': 1.9.6 + + '@svar-ui/lib-data-provider@1.7.2': + dependencies: + '@svar-ui/lib-state': 1.9.6 + + '@svar-ui/lib-dom@0.12.1': {} + + '@svar-ui/lib-state@1.9.6': {} + + '@svar-ui/lib-svelte@0.5.2': + dependencies: + '@svar-ui/lib-state': 1.9.6 + + '@svar-ui/svelte-core@2.5.1': + dependencies: + '@svar-ui/core-locales': 2.5.1 + '@svar-ui/lib-dom': 0.12.1 + '@svar-ui/lib-svelte': 0.5.2 + + '@svar-ui/svelte-grid@2.6.2': + dependencies: + '@svar-ui/grid-data-provider': 2.6.2 + '@svar-ui/grid-locales': 2.6.2 + '@svar-ui/grid-store': 2.6.2 + '@svar-ui/lib-dom': 0.12.1 + '@svar-ui/lib-state': 1.9.6 + '@svar-ui/lib-svelte': 0.5.2 + '@svar-ui/svelte-core': 2.5.1 + '@svar-ui/svelte-menu': 2.5.1 + '@svar-ui/svelte-toolbar': 2.5.1 + + '@svar-ui/svelte-menu@2.5.1': + dependencies: + '@svar-ui/lib-dom': 0.12.1 + '@svar-ui/svelte-core': 2.5.1 + + '@svar-ui/svelte-toolbar@2.5.1': + dependencies: + '@svar-ui/lib-dom': 0.12.1 + '@svar-ui/svelte-core': 2.5.1 + '@sveltejs/acorn-typescript@1.0.9(acorn@8.16.0)': dependencies: acorn: 8.16.0 @@ -958,6 +2526,10 @@ snapshots: transitivePeerDependencies: - supports-color + '@swc/helpers@0.5.21': + dependencies: + tslib: 2.8.1 + '@tailwindcss/node@4.2.2': dependencies: 
'@jridgewell/remapping': 2.3.5 @@ -1026,43 +2598,712 @@ snapshots: tailwindcss: 4.2.2 vite: 6.4.1(@types/node@24.12.2)(jiti@2.6.1)(lightningcss@1.32.0) + '@tanstack/svelte-virtual@3.13.24(svelte@5.55.1)': + dependencies: + '@tanstack/virtual-core': 3.14.0 + svelte: 5.55.1 + + '@tanstack/virtual-core@3.14.0': {} + + '@tiptap/core@2.27.2(@tiptap/pm@2.27.2)': + dependencies: + '@tiptap/pm': 2.27.2 + + '@tiptap/extension-blockquote@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + + '@tiptap/extension-bold@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + + '@tiptap/extension-bullet-list@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + + '@tiptap/extension-code-block-lowlight@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/extension-code-block@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2)(highlight.js@11.8.0)(lowlight@2.9.0)': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + '@tiptap/extension-code-block': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2) + '@tiptap/pm': 2.27.2 + highlight.js: 11.8.0 + lowlight: 2.9.0 + + '@tiptap/extension-code-block@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2)': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + '@tiptap/pm': 2.27.2 + + '@tiptap/extension-code@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + + '@tiptap/extension-document@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + + '@tiptap/extension-dropcursor@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2)': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + '@tiptap/pm': 2.27.2 + + 
'@tiptap/extension-floating-menu@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2)': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + '@tiptap/pm': 2.27.2 + tippy.js: 6.3.7 + + '@tiptap/extension-gapcursor@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2)': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + '@tiptap/pm': 2.27.2 + + '@tiptap/extension-hard-break@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + + '@tiptap/extension-heading@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + + '@tiptap/extension-history@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2)': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + '@tiptap/pm': 2.27.2 + + '@tiptap/extension-horizontal-rule@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2)': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + '@tiptap/pm': 2.27.2 + + '@tiptap/extension-image@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + + '@tiptap/extension-italic@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + + '@tiptap/extension-link@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2)': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + '@tiptap/pm': 2.27.2 + linkifyjs: 4.3.2 + + '@tiptap/extension-list-item@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + + '@tiptap/extension-ordered-list@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + + '@tiptap/extension-paragraph@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + + 
'@tiptap/extension-placeholder@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2)': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + '@tiptap/pm': 2.27.2 + + '@tiptap/extension-strike@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + + '@tiptap/extension-task-item@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2)': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + '@tiptap/pm': 2.27.2 + + '@tiptap/extension-task-list@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + + '@tiptap/extension-text-style@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + + '@tiptap/extension-text@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + + '@tiptap/extension-underline@2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + + '@tiptap/pm@2.27.2': + dependencies: + prosemirror-changeset: 2.4.1 + prosemirror-collab: 1.3.1 + prosemirror-commands: 1.7.1 + prosemirror-dropcursor: 1.8.2 + prosemirror-gapcursor: 1.4.1 + prosemirror-history: 1.5.0 + prosemirror-inputrules: 1.5.1 + prosemirror-keymap: 1.2.3 + prosemirror-markdown: 1.13.4 + prosemirror-menu: 1.3.2 + prosemirror-model: 1.25.4 + prosemirror-schema-basic: 1.2.4 + prosemirror-schema-list: 1.5.1 + prosemirror-state: 1.4.4 + prosemirror-tables: 1.8.5 + prosemirror-trailing-node: 3.0.0(prosemirror-model@1.25.4)(prosemirror-state@1.4.4)(prosemirror-view@1.41.8) + prosemirror-transform: 1.12.0 + prosemirror-view: 1.41.8 + + '@tiptap/starter-kit@2.27.2': + dependencies: + '@tiptap/core': 2.27.2(@tiptap/pm@2.27.2) + '@tiptap/extension-blockquote': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2)) + '@tiptap/extension-bold': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2)) + 
'@tiptap/extension-bullet-list': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2)) + '@tiptap/extension-code': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2)) + '@tiptap/extension-code-block': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2) + '@tiptap/extension-document': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2)) + '@tiptap/extension-dropcursor': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2) + '@tiptap/extension-gapcursor': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2) + '@tiptap/extension-hard-break': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2)) + '@tiptap/extension-heading': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2)) + '@tiptap/extension-history': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2) + '@tiptap/extension-horizontal-rule': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2))(@tiptap/pm@2.27.2) + '@tiptap/extension-italic': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2)) + '@tiptap/extension-list-item': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2)) + '@tiptap/extension-ordered-list': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2)) + '@tiptap/extension-paragraph': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2)) + '@tiptap/extension-strike': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2)) + '@tiptap/extension-text': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2)) + '@tiptap/extension-text-style': 2.27.2(@tiptap/core@2.27.2(@tiptap/pm@2.27.2)) + '@tiptap/pm': 2.27.2 + + '@types/debug@4.1.13': + dependencies: + '@types/ms': 2.1.0 + '@types/estree@1.0.8': {} + '@types/hast@2.3.10': + dependencies: + '@types/unist': 2.0.11 + + '@types/hast@3.0.4': + dependencies: + '@types/unist': 3.0.3 + + '@types/katex@0.16.8': {} + + '@types/linkify-it@5.0.0': {} + + '@types/markdown-it@14.1.2': + dependencies: + '@types/linkify-it': 5.0.0 + '@types/mdurl': 2.0.0 + + '@types/mdast@4.0.4': + dependencies: + '@types/unist': 3.0.3 + + '@types/mdurl@2.0.0': {} + + '@types/ms@2.1.0': {} + 
'@types/node@24.12.2': dependencies: undici-types: 7.16.0 '@types/trusted-types@2.0.7': {} + '@types/unist@2.0.11': {} + + '@types/unist@3.0.3': {} + '@typescript-eslint/types@8.58.0': {} + '@ungap/structured-clone@1.3.0': {} + + '@warkypublic/artemis-kit@1.0.10': + dependencies: + semver: 7.7.4 + uuid: 11.1.0 + + '@warkypublic/artemis-kit@file:../../artemis-kit': + dependencies: + semver: 7.7.4 + uuid: 11.1.0 + + '@warkypublic/resolvespec-js@1.0.1': + dependencies: + uuid: 13.0.0 + + '@warkypublic/svelix@0.1.37(highlight.js@11.8.0)(svelte@5.55.1)(unified@11.0.5)': + dependencies: + '@cartamd/plugin-anchor': 2.2.0(carta-md@4.11.2(svelte@5.55.1)) + '@cartamd/plugin-attachment': 4.2.0(carta-md@4.11.2(svelte@5.55.1)) + '@cartamd/plugin-code': 4.2.0(carta-md@4.11.2(svelte@5.55.1)) + '@cartamd/plugin-component': 1.1.1(carta-md@4.11.2(svelte@5.55.1))(unified@11.0.5) + '@cartamd/plugin-emoji': 4.3.0(carta-md@4.11.2(svelte@5.55.1)) + '@cartamd/plugin-math': 4.3.1(carta-md@4.11.2(svelte@5.55.1))(svelte@5.55.1) + '@friendofsvelte/tipex': 0.1.1(highlight.js@11.8.0)(svelte@5.55.1) + '@js-temporal/polyfill': 0.5.1 + '@svar-ui/svelte-grid': 2.6.2 + '@warkypublic/artemis-kit': 1.0.10 + '@warkypublic/resolvespec-js': 1.0.1 + carta-md: 4.11.2(svelte@5.55.1) + github-markdown-css: 5.9.0 + isomorphic-dompurify: 3.10.0 + katex: 0.16.45 + monaco-editor: 0.55.1 + svelte: 5.55.1 + transitivePeerDependencies: + - '@noble/hashes' + - canvas + - highlight.js + - supports-color + - unified + + '@zag-js/accordion@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/anatomy@1.39.1': {} + + '@zag-js/aria-hidden@1.39.1': + dependencies: + '@zag-js/dom-query': 1.39.1 + + '@zag-js/auto-resize@1.39.1': + dependencies: + '@zag-js/dom-query': 1.39.1 + + '@zag-js/avatar@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dom-query': 1.39.1 + 
'@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/carousel@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/scroll-snap': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/collapsible@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/collection@1.39.1': + dependencies: + '@zag-js/utils': 1.39.1 + + '@zag-js/combobox@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/collection': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dismissable': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/focus-visible': 1.39.1 + '@zag-js/live-region': 1.39.1 + '@zag-js/popper': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/core@1.39.1': + dependencies: + '@zag-js/dom-query': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/date-picker@1.39.1(@internationalized/date@3.12.0)': + dependencies: + '@internationalized/date': 3.12.0 + '@zag-js/anatomy': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/date-utils': 1.39.1(@internationalized/date@3.12.0) + '@zag-js/dismissable': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/live-region': 1.39.1 + '@zag-js/popper': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/date-utils@1.39.1(@internationalized/date@3.12.0)': + dependencies: + '@internationalized/date': 3.12.0 + + '@zag-js/dialog@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/aria-hidden': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dismissable': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/focus-trap': 1.39.1 + '@zag-js/remove-scroll': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/dismissable@1.39.1': + dependencies: + '@zag-js/dom-query': 1.39.1 + '@zag-js/interact-outside': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/dom-query@1.39.1': + dependencies: + 
'@zag-js/types': 1.39.1 + + '@zag-js/file-upload@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/file-utils': 1.39.1 + '@zag-js/i18n-utils': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/file-utils@1.39.1': + dependencies: + '@zag-js/i18n-utils': 1.39.1 + + '@zag-js/floating-panel@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/popper': 1.39.1 + '@zag-js/rect-utils': 1.39.1 + '@zag-js/store': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/focus-trap@1.39.1': + dependencies: + '@zag-js/dom-query': 1.39.1 + + '@zag-js/focus-visible@1.39.1': + dependencies: + '@zag-js/dom-query': 1.39.1 + + '@zag-js/i18n-utils@1.39.1': + dependencies: + '@zag-js/dom-query': 1.39.1 + + '@zag-js/interact-outside@1.39.1': + dependencies: + '@zag-js/dom-query': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/listbox@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/collection': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/focus-visible': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/live-region@1.39.1': {} + + '@zag-js/menu@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dismissable': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/focus-visible': 1.39.1 + '@zag-js/popper': 1.39.1 + '@zag-js/rect-utils': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/pagination@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/popover@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/aria-hidden': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dismissable': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/focus-trap': 1.39.1 + '@zag-js/popper': 1.39.1 + 
'@zag-js/remove-scroll': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/popper@1.39.1': + dependencies: + '@floating-ui/dom': 1.7.6 + '@zag-js/dom-query': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/progress@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/radio-group@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/focus-visible': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/rating-group@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/rect-utils@1.39.1': {} + + '@zag-js/remove-scroll@1.39.1': + dependencies: + '@zag-js/dom-query': 1.39.1 + + '@zag-js/scroll-snap@1.39.1': + dependencies: + '@zag-js/dom-query': 1.39.1 + + '@zag-js/slider@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/steps@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/store@1.39.1': + dependencies: + proxy-compare: 3.0.1 + + '@zag-js/svelte@1.39.1(svelte@5.55.1)': + dependencies: + '@zag-js/core': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + svelte: 5.55.1 + + '@zag-js/switch@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/focus-visible': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/tabs@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + 
'@zag-js/tags-input@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/auto-resize': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/interact-outside': 1.39.1 + '@zag-js/live-region': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/toast@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dismissable': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/toggle-group@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/tooltip@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/focus-visible': 1.39.1 + '@zag-js/popper': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/tree-view@1.39.1': + dependencies: + '@zag-js/anatomy': 1.39.1 + '@zag-js/collection': 1.39.1 + '@zag-js/core': 1.39.1 + '@zag-js/dom-query': 1.39.1 + '@zag-js/types': 1.39.1 + '@zag-js/utils': 1.39.1 + + '@zag-js/types@1.39.1': + dependencies: + csstype: 3.2.3 + + '@zag-js/utils@1.39.1': {} + acorn@8.16.0: {} + argparse@2.0.1: {} + aria-query@5.3.1: {} axobject-query@4.1.0: {} + bail@2.0.2: {} + + bezier-easing@2.1.0: {} + + bidi-js@1.0.3: + dependencies: + require-from-string: 2.0.2 + + carta-md@4.11.2(svelte@5.55.1): + dependencies: + diff: 5.2.2 + esm-env: 1.2.2 + rehype-stringify: 10.0.1 + remark-gfm: 4.0.1 + remark-parse: 11.0.0 + remark-rehype: 11.1.2 + shiki: 3.23.0 + svelte: 5.55.1 + unified: 11.0.5 + transitivePeerDependencies: + - supports-color + + ccount@2.0.1: {} + + char-regex@1.0.2: {} + + character-entities-html4@2.1.0: {} + + character-entities-legacy@3.0.0: {} + + character-entities@2.0.2: {} + chokidar@4.0.3: dependencies: readdirp: 4.1.2 clsx@2.1.1: {} + comma-separated-tokens@2.0.3: {} + + commander@8.3.0: {} + + crelt@1.0.6: {} 
+ + css-tree@3.2.1: + dependencies: + mdn-data: 2.27.1 + source-map-js: 1.2.1 + + csstype@3.2.3: {} + + data-urls@7.0.0: + dependencies: + whatwg-mimetype: 5.0.0 + whatwg-url: 16.0.1 + transitivePeerDependencies: + - '@noble/hashes' + debug@4.4.3: dependencies: ms: 2.1.3 + decimal.js@10.6.0: {} + + decode-named-character-reference@1.3.0: + dependencies: + character-entities: 2.0.2 + deepmerge@4.3.1: {} + dequal@2.0.3: {} + detect-libc@2.1.2: {} devalue@5.6.4: {} + devlop@1.1.0: + dependencies: + dequal: 2.0.3 + + diff@5.2.2: {} + + dompurify@3.2.7: + optionalDependencies: + '@types/trusted-types': 2.0.7 + + dompurify@3.4.1: + optionalDependencies: + '@types/trusted-types': 2.0.7 + + emojilib@2.4.0: {} + + emoticon@4.1.0: {} + enhanced-resolve@5.20.1: dependencies: graceful-fs: 4.2.11 tapable: 2.3.2 + entities@4.5.0: {} + + entities@6.0.1: {} + + entities@8.0.0: {} + esbuild@0.25.12: optionalDependencies: '@esbuild/aix-ppc64': 0.25.12 @@ -1092,6 +3333,10 @@ snapshots: '@esbuild/win32-ia32': 0.25.12 '@esbuild/win32-x64': 0.25.12 + escape-string-regexp@4.0.0: {} + + escape-string-regexp@5.0.0: {} + esm-env@1.2.2: {} esrap@2.2.4: @@ -1099,21 +3344,173 @@ snapshots: '@jridgewell/sourcemap-codec': 1.5.5 '@typescript-eslint/types': 8.58.0 + extend@3.0.2: {} + + fault@2.0.1: + dependencies: + format: 0.2.2 + fdir@6.5.0(picomatch@4.0.4): optionalDependencies: picomatch: 4.0.4 + format@0.2.2: {} + fsevents@2.3.3: optional: true + github-markdown-css@5.9.0: {} + + github-slugger@2.0.0: {} + graceful-fs@4.2.11: {} + hast-util-from-dom@5.0.1: + dependencies: + '@types/hast': 3.0.4 + hastscript: 9.0.1 + web-namespaces: 2.0.1 + + hast-util-from-html-isomorphic@2.0.0: + dependencies: + '@types/hast': 3.0.4 + hast-util-from-dom: 5.0.1 + hast-util-from-html: 2.0.3 + unist-util-remove-position: 5.0.0 + + hast-util-from-html@2.0.3: + dependencies: + '@types/hast': 3.0.4 + devlop: 1.1.0 + hast-util-from-parse5: 8.0.3 + parse5: 7.3.0 + vfile: 6.0.3 + vfile-message: 4.0.3 + + 
hast-util-from-parse5@8.0.3: + dependencies: + '@types/hast': 3.0.4 + '@types/unist': 3.0.3 + devlop: 1.1.0 + hastscript: 9.0.1 + property-information: 7.1.0 + vfile: 6.0.3 + vfile-location: 5.0.3 + web-namespaces: 2.0.1 + + hast-util-heading-rank@3.0.0: + dependencies: + '@types/hast': 3.0.4 + + hast-util-is-element@3.0.0: + dependencies: + '@types/hast': 3.0.4 + + hast-util-parse-selector@4.0.0: + dependencies: + '@types/hast': 3.0.4 + + hast-util-to-html@9.0.5: + dependencies: + '@types/hast': 3.0.4 + '@types/unist': 3.0.3 + ccount: 2.0.1 + comma-separated-tokens: 2.0.3 + hast-util-whitespace: 3.0.0 + html-void-elements: 3.0.0 + mdast-util-to-hast: 13.2.1 + property-information: 7.1.0 + space-separated-tokens: 2.0.2 + stringify-entities: 4.0.4 + zwitch: 2.0.4 + + hast-util-to-string@3.0.1: + dependencies: + '@types/hast': 3.0.4 + + hast-util-to-text@4.0.2: + dependencies: + '@types/hast': 3.0.4 + '@types/unist': 3.0.3 + hast-util-is-element: 3.0.0 + unist-util-find-after: 5.0.0 + + hast-util-whitespace@3.0.0: + dependencies: + '@types/hast': 3.0.4 + + hastscript@9.0.1: + dependencies: + '@types/hast': 3.0.4 + comma-separated-tokens: 2.0.3 + hast-util-parse-selector: 4.0.0 + property-information: 7.1.0 + space-separated-tokens: 2.0.2 + + highlight.js@11.8.0: {} + + html-encoding-sniffer@6.0.0: + dependencies: + '@exodus/bytes': 1.15.0 + transitivePeerDependencies: + - '@noble/hashes' + + html-void-elements@3.0.0: {} + + iconify-icon@1.0.8: + dependencies: + '@iconify/types': 2.0.0 + + is-plain-obj@4.1.0: {} + + is-potential-custom-element-name@1.0.1: {} + is-reference@3.0.3: dependencies: '@types/estree': 1.0.8 + isomorphic-dompurify@3.10.0: + dependencies: + dompurify: 3.4.1 + jsdom: 29.0.2 + transitivePeerDependencies: + - '@noble/hashes' + - canvas + jiti@2.6.1: {} + jsbi@4.3.2: {} + + jsdom@29.0.2: + dependencies: + '@asamuzakjp/css-color': 5.1.11 + '@asamuzakjp/dom-selector': 7.1.1 + '@bramus/specificity': 2.4.2 + '@csstools/css-syntax-patches-for-csstree': 
1.1.3(css-tree@3.2.1) + '@exodus/bytes': 1.15.0 + css-tree: 3.2.1 + data-urls: 7.0.0 + decimal.js: 10.6.0 + html-encoding-sniffer: 6.0.0 + is-potential-custom-element-name: 1.0.1 + lru-cache: 11.3.5 + parse5: 8.0.1 + saxes: 6.0.0 + symbol-tree: 3.2.4 + tough-cookie: 6.0.1 + undici: 7.25.0 + w3c-xmlserializer: 5.0.0 + webidl-conversions: 8.0.1 + whatwg-mimetype: 5.0.0 + whatwg-url: 16.0.1 + xml-name-validator: 5.0.0 + transitivePeerDependencies: + - '@noble/hashes' + + katex@0.16.45: + dependencies: + commander: 8.3.0 + kleur@4.1.5: {} lightningcss-android-arm64@1.32.0: @@ -1165,18 +3562,408 @@ snapshots: lightningcss-win32-arm64-msvc: 1.32.0 lightningcss-win32-x64-msvc: 1.32.0 + linkify-it@5.0.0: + dependencies: + uc.micro: 2.1.0 + + linkifyjs@4.3.2: {} + locate-character@3.0.0: {} + longest-streak@3.1.0: {} + + lowlight@2.9.0: + dependencies: + '@types/hast': 2.3.10 + fault: 2.0.1 + highlight.js: 11.8.0 + + lru-cache@11.3.5: {} + magic-string@0.30.21: dependencies: '@jridgewell/sourcemap-codec': 1.5.5 + markdown-it@14.1.1: + dependencies: + argparse: 2.0.1 + entities: 4.5.0 + linkify-it: 5.0.0 + mdurl: 2.0.0 + punycode.js: 2.3.1 + uc.micro: 2.1.0 + + markdown-table@3.0.4: {} + + marked@14.0.0: {} + + mdast-util-find-and-replace@3.0.2: + dependencies: + '@types/mdast': 4.0.4 + escape-string-regexp: 5.0.0 + unist-util-is: 6.0.1 + unist-util-visit-parents: 6.0.2 + + mdast-util-from-markdown@2.0.3: + dependencies: + '@types/mdast': 4.0.4 + '@types/unist': 3.0.3 + decode-named-character-reference: 1.3.0 + devlop: 1.1.0 + mdast-util-to-string: 4.0.0 + micromark: 4.0.2 + micromark-util-decode-numeric-character-reference: 2.0.2 + micromark-util-decode-string: 2.0.1 + micromark-util-normalize-identifier: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + unist-util-stringify-position: 4.0.0 + transitivePeerDependencies: + - supports-color + + mdast-util-gfm-autolink-literal@2.0.1: + dependencies: + '@types/mdast': 4.0.4 + ccount: 2.0.1 + devlop: 1.1.0 + 
mdast-util-find-and-replace: 3.0.2 + micromark-util-character: 2.1.1 + + mdast-util-gfm-footnote@2.1.0: + dependencies: + '@types/mdast': 4.0.4 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.3 + mdast-util-to-markdown: 2.1.2 + micromark-util-normalize-identifier: 2.0.1 + transitivePeerDependencies: + - supports-color + + mdast-util-gfm-strikethrough@2.0.0: + dependencies: + '@types/mdast': 4.0.4 + mdast-util-from-markdown: 2.0.3 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-gfm-table@2.0.0: + dependencies: + '@types/mdast': 4.0.4 + devlop: 1.1.0 + markdown-table: 3.0.4 + mdast-util-from-markdown: 2.0.3 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-gfm-task-list-item@2.0.0: + dependencies: + '@types/mdast': 4.0.4 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.3 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-gfm@3.1.0: + dependencies: + mdast-util-from-markdown: 2.0.3 + mdast-util-gfm-autolink-literal: 2.0.1 + mdast-util-gfm-footnote: 2.1.0 + mdast-util-gfm-strikethrough: 2.0.0 + mdast-util-gfm-table: 2.0.0 + mdast-util-gfm-task-list-item: 2.0.0 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-math@3.0.0: + dependencies: + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + devlop: 1.1.0 + longest-streak: 3.1.0 + mdast-util-from-markdown: 2.0.3 + mdast-util-to-markdown: 2.1.2 + unist-util-remove-position: 5.0.0 + transitivePeerDependencies: + - supports-color + + mdast-util-phrasing@4.1.0: + dependencies: + '@types/mdast': 4.0.4 + unist-util-is: 6.0.1 + + mdast-util-to-hast@13.2.1: + dependencies: + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + '@ungap/structured-clone': 1.3.0 + devlop: 1.1.0 + micromark-util-sanitize-uri: 2.0.1 + trim-lines: 3.0.1 + unist-util-position: 5.0.0 + unist-util-visit: 5.1.0 + vfile: 6.0.3 + + mdast-util-to-markdown@2.1.2: + dependencies: + 
'@types/mdast': 4.0.4 + '@types/unist': 3.0.3 + longest-streak: 3.1.0 + mdast-util-phrasing: 4.1.0 + mdast-util-to-string: 4.0.0 + micromark-util-classify-character: 2.0.1 + micromark-util-decode-string: 2.0.1 + unist-util-visit: 5.1.0 + zwitch: 2.0.4 + + mdast-util-to-string@4.0.0: + dependencies: + '@types/mdast': 4.0.4 + + mdn-data@2.27.1: {} + + mdurl@2.0.0: {} + + micromark-core-commonmark@2.0.3: + dependencies: + decode-named-character-reference: 1.3.0 + devlop: 1.1.0 + micromark-factory-destination: 2.0.1 + micromark-factory-label: 2.0.1 + micromark-factory-space: 2.0.1 + micromark-factory-title: 2.0.1 + micromark-factory-whitespace: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-chunked: 2.0.1 + micromark-util-classify-character: 2.0.1 + micromark-util-html-tag-name: 2.0.1 + micromark-util-normalize-identifier: 2.0.1 + micromark-util-resolve-all: 2.0.1 + micromark-util-subtokenize: 2.1.0 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm-autolink-literal@2.1.0: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-sanitize-uri: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm-footnote@2.1.0: + dependencies: + devlop: 1.1.0 + micromark-core-commonmark: 2.0.3 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-normalize-identifier: 2.0.1 + micromark-util-sanitize-uri: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm-strikethrough@2.1.0: + dependencies: + devlop: 1.1.0 + micromark-util-chunked: 2.0.1 + micromark-util-classify-character: 2.0.1 + micromark-util-resolve-all: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm-table@2.1.1: + dependencies: + devlop: 1.1.0 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm-tagfilter@2.0.0: 
+ dependencies: + micromark-util-types: 2.0.2 + + micromark-extension-gfm-task-list-item@2.1.0: + dependencies: + devlop: 1.1.0 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm@3.0.0: + dependencies: + micromark-extension-gfm-autolink-literal: 2.1.0 + micromark-extension-gfm-footnote: 2.1.0 + micromark-extension-gfm-strikethrough: 2.1.0 + micromark-extension-gfm-table: 2.1.1 + micromark-extension-gfm-tagfilter: 2.0.0 + micromark-extension-gfm-task-list-item: 2.1.0 + micromark-util-combine-extensions: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-math@3.1.0: + dependencies: + '@types/katex': 0.16.8 + devlop: 1.1.0 + katex: 0.16.45 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-factory-destination@2.0.1: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-factory-label@2.0.1: + dependencies: + devlop: 1.1.0 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-factory-space@2.0.1: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-types: 2.0.2 + + micromark-factory-title@2.0.1: + dependencies: + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-factory-whitespace@2.0.1: + dependencies: + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-util-character@2.1.1: + dependencies: + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-util-chunked@2.0.1: + dependencies: + micromark-util-symbol: 2.0.1 + + micromark-util-classify-character@2.0.1: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 
+ micromark-util-types: 2.0.2 + + micromark-util-combine-extensions@2.0.1: + dependencies: + micromark-util-chunked: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-util-decode-numeric-character-reference@2.0.2: + dependencies: + micromark-util-symbol: 2.0.1 + + micromark-util-decode-string@2.0.1: + dependencies: + decode-named-character-reference: 1.3.0 + micromark-util-character: 2.1.1 + micromark-util-decode-numeric-character-reference: 2.0.2 + micromark-util-symbol: 2.0.1 + + micromark-util-encode@2.0.1: {} + + micromark-util-html-tag-name@2.0.1: {} + + micromark-util-normalize-identifier@2.0.1: + dependencies: + micromark-util-symbol: 2.0.1 + + micromark-util-resolve-all@2.0.1: + dependencies: + micromark-util-types: 2.0.2 + + micromark-util-sanitize-uri@2.0.1: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-encode: 2.0.1 + micromark-util-symbol: 2.0.1 + + micromark-util-subtokenize@2.1.0: + dependencies: + devlop: 1.1.0 + micromark-util-chunked: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-util-symbol@2.0.1: {} + + micromark-util-types@2.0.2: {} + + micromark@4.0.2: + dependencies: + '@types/debug': 4.1.13 + debug: 4.4.3 + decode-named-character-reference: 1.3.0 + devlop: 1.1.0 + micromark-core-commonmark: 2.0.3 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-chunked: 2.0.1 + micromark-util-combine-extensions: 2.0.1 + micromark-util-decode-numeric-character-reference: 2.0.2 + micromark-util-encode: 2.0.1 + micromark-util-normalize-identifier: 2.0.1 + micromark-util-resolve-all: 2.0.1 + micromark-util-sanitize-uri: 2.0.1 + micromark-util-subtokenize: 2.1.0 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + transitivePeerDependencies: + - supports-color + + monaco-editor@0.55.1: + dependencies: + dompurify: 3.2.7 + marked: 14.0.0 + mri@1.2.0: {} ms@2.1.3: {} nanoid@3.3.11: {} + node-emoji@2.2.0: + dependencies: + '@sindresorhus/is': 4.6.0 + char-regex: 1.0.2 
+ emojilib: 2.4.0 + skin-tone: 2.0.0 + + oniguruma-parser@0.12.2: {} + + oniguruma-to-es@4.3.6: + dependencies: + oniguruma-parser: 0.12.2 + regex: 6.1.0 + regex-recursion: 6.0.2 + + orderedmap@2.1.1: {} + + parse5@7.3.0: + dependencies: + entities: 6.0.1 + + parse5@8.0.1: + dependencies: + entities: 8.0.0 + picocolors@1.1.1: {} picomatch@4.0.4: {} @@ -1187,8 +3974,221 @@ snapshots: picocolors: 1.1.1 source-map-js: 1.2.1 + property-information@7.1.0: {} + + prosemirror-changeset@2.4.1: + dependencies: + prosemirror-transform: 1.12.0 + + prosemirror-collab@1.3.1: + dependencies: + prosemirror-state: 1.4.4 + + prosemirror-commands@1.7.1: + dependencies: + prosemirror-model: 1.25.4 + prosemirror-state: 1.4.4 + prosemirror-transform: 1.12.0 + + prosemirror-dropcursor@1.8.2: + dependencies: + prosemirror-state: 1.4.4 + prosemirror-transform: 1.12.0 + prosemirror-view: 1.41.8 + + prosemirror-gapcursor@1.4.1: + dependencies: + prosemirror-keymap: 1.2.3 + prosemirror-model: 1.25.4 + prosemirror-state: 1.4.4 + prosemirror-view: 1.41.8 + + prosemirror-history@1.5.0: + dependencies: + prosemirror-state: 1.4.4 + prosemirror-transform: 1.12.0 + prosemirror-view: 1.41.8 + rope-sequence: 1.3.4 + + prosemirror-inputrules@1.5.1: + dependencies: + prosemirror-state: 1.4.4 + prosemirror-transform: 1.12.0 + + prosemirror-keymap@1.2.3: + dependencies: + prosemirror-state: 1.4.4 + w3c-keyname: 2.2.8 + + prosemirror-markdown@1.13.4: + dependencies: + '@types/markdown-it': 14.1.2 + markdown-it: 14.1.1 + prosemirror-model: 1.25.4 + + prosemirror-menu@1.3.2: + dependencies: + crelt: 1.0.6 + prosemirror-commands: 1.7.1 + prosemirror-history: 1.5.0 + prosemirror-state: 1.4.4 + + prosemirror-model@1.25.4: + dependencies: + orderedmap: 2.1.1 + + prosemirror-schema-basic@1.2.4: + dependencies: + prosemirror-model: 1.25.4 + + prosemirror-schema-list@1.5.1: + dependencies: + prosemirror-model: 1.25.4 + prosemirror-state: 1.4.4 + prosemirror-transform: 1.12.0 + + prosemirror-state@1.4.4: + 
dependencies: + prosemirror-model: 1.25.4 + prosemirror-transform: 1.12.0 + prosemirror-view: 1.41.8 + + prosemirror-tables@1.8.5: + dependencies: + prosemirror-keymap: 1.2.3 + prosemirror-model: 1.25.4 + prosemirror-state: 1.4.4 + prosemirror-transform: 1.12.0 + prosemirror-view: 1.41.8 + + prosemirror-trailing-node@3.0.0(prosemirror-model@1.25.4)(prosemirror-state@1.4.4)(prosemirror-view@1.41.8): + dependencies: + '@remirror/core-constants': 3.0.0 + escape-string-regexp: 4.0.0 + prosemirror-model: 1.25.4 + prosemirror-state: 1.4.4 + prosemirror-view: 1.41.8 + + prosemirror-transform@1.12.0: + dependencies: + prosemirror-model: 1.25.4 + + prosemirror-view@1.41.8: + dependencies: + prosemirror-model: 1.25.4 + prosemirror-state: 1.4.4 + prosemirror-transform: 1.12.0 + + proxy-compare@3.0.1: {} + + punycode.js@2.3.1: {} + + punycode@2.3.1: {} + readdirp@4.1.2: {} + regex-recursion@6.0.2: + dependencies: + regex-utilities: 2.3.0 + + regex-utilities@2.3.0: {} + + regex@6.1.0: + dependencies: + regex-utilities: 2.3.0 + + rehype-autolink-headings@7.1.0: + dependencies: + '@types/hast': 3.0.4 + '@ungap/structured-clone': 1.3.0 + hast-util-heading-rank: 3.0.0 + hast-util-is-element: 3.0.0 + unified: 11.0.5 + unist-util-visit: 5.1.0 + + rehype-katex@7.0.1: + dependencies: + '@types/hast': 3.0.4 + '@types/katex': 0.16.8 + hast-util-from-html-isomorphic: 2.0.0 + hast-util-to-text: 4.0.2 + katex: 0.16.45 + unist-util-visit-parents: 6.0.2 + vfile: 6.0.3 + + rehype-parse@9.0.1: + dependencies: + '@types/hast': 3.0.4 + hast-util-from-html: 2.0.3 + unified: 11.0.5 + + rehype-slug@6.0.0: + dependencies: + '@types/hast': 3.0.4 + github-slugger: 2.0.0 + hast-util-heading-rank: 3.0.0 + hast-util-to-string: 3.0.1 + unist-util-visit: 5.1.0 + + rehype-stringify@10.0.1: + dependencies: + '@types/hast': 3.0.4 + hast-util-to-html: 9.0.5 + unified: 11.0.5 + + remark-emoji@5.0.2: + dependencies: + '@types/mdast': 4.0.4 + emoticon: 4.1.0 + mdast-util-find-and-replace: 3.0.2 + node-emoji: 2.2.0 
+ unified: 11.0.5 + + remark-gfm@4.0.1: + dependencies: + '@types/mdast': 4.0.4 + mdast-util-gfm: 3.1.0 + micromark-extension-gfm: 3.0.0 + remark-parse: 11.0.0 + remark-stringify: 11.0.0 + unified: 11.0.5 + transitivePeerDependencies: + - supports-color + + remark-math@6.0.0: + dependencies: + '@types/mdast': 4.0.4 + mdast-util-math: 3.0.0 + micromark-extension-math: 3.1.0 + unified: 11.0.5 + transitivePeerDependencies: + - supports-color + + remark-parse@11.0.0: + dependencies: + '@types/mdast': 4.0.4 + mdast-util-from-markdown: 2.0.3 + micromark-util-types: 2.0.2 + unified: 11.0.5 + transitivePeerDependencies: + - supports-color + + remark-rehype@11.1.2: + dependencies: + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + mdast-util-to-hast: 13.2.1 + unified: 11.0.5 + vfile: 6.0.3 + + remark-stringify@11.0.0: + dependencies: + '@types/mdast': 4.0.4 + mdast-util-to-markdown: 2.1.2 + unified: 11.0.5 + + require-from-string@2.0.2: {} + rollup@4.60.1: dependencies: '@types/estree': 1.0.8 @@ -1220,12 +4220,42 @@ snapshots: '@rollup/rollup-win32-x64-msvc': 4.60.1 fsevents: 2.3.3 + rope-sequence@1.3.4: {} + sade@1.8.1: dependencies: mri: 1.2.0 + saxes@6.0.0: + dependencies: + xmlchars: 2.2.0 + + semver@7.7.4: {} + + shiki@3.23.0: + dependencies: + '@shikijs/core': 3.23.0 + '@shikijs/engine-javascript': 3.23.0 + '@shikijs/engine-oniguruma': 3.23.0 + '@shikijs/langs': 3.23.0 + '@shikijs/themes': 3.23.0 + '@shikijs/types': 3.23.0 + '@shikijs/vscode-textmate': 10.0.2 + '@types/hast': 3.0.4 + + skin-tone@2.0.0: + dependencies: + unicode-emoji-modifier-base: 1.0.0 + source-map-js@1.2.1: {} + space-separated-tokens@2.0.2: {} + + stringify-entities@4.0.4: + dependencies: + character-entities-html4: 2.1.0 + character-entities-legacy: 3.0.0 + svelte-check@4.4.6(picomatch@4.0.4)(svelte@5.55.1)(typescript@5.9.3): dependencies: '@jridgewell/trace-mapping': 0.3.31 @@ -1257,6 +4287,8 @@ snapshots: magic-string: 0.30.21 zimmerframe: 1.1.4 + symbol-tree@3.2.4: {} + tailwindcss@4.2.2: {} 
tapable@2.3.2: {} @@ -1266,10 +4298,102 @@ snapshots: fdir: 6.5.0(picomatch@4.0.4) picomatch: 4.0.4 + tippy.js@6.3.7: + dependencies: + '@popperjs/core': 2.11.8 + + tldts-core@7.0.28: {} + + tldts@7.0.28: + dependencies: + tldts-core: 7.0.28 + + tough-cookie@6.0.1: + dependencies: + tldts: 7.0.28 + + tr46@6.0.0: + dependencies: + punycode: 2.3.1 + + trim-lines@3.0.1: {} + + trough@2.2.0: {} + + tslib@2.8.1: {} + typescript@5.9.3: {} + uc.micro@2.1.0: {} + undici-types@7.16.0: {} + undici@7.25.0: {} + + unicode-emoji-modifier-base@1.0.0: {} + + unified@11.0.5: + dependencies: + '@types/unist': 3.0.3 + bail: 2.0.2 + devlop: 1.1.0 + extend: 3.0.2 + is-plain-obj: 4.1.0 + trough: 2.2.0 + vfile: 6.0.3 + + unist-util-find-after@5.0.0: + dependencies: + '@types/unist': 3.0.3 + unist-util-is: 6.0.1 + + unist-util-is@6.0.1: + dependencies: + '@types/unist': 3.0.3 + + unist-util-position@5.0.0: + dependencies: + '@types/unist': 3.0.3 + + unist-util-remove-position@5.0.0: + dependencies: + '@types/unist': 3.0.3 + unist-util-visit: 5.1.0 + + unist-util-stringify-position@4.0.0: + dependencies: + '@types/unist': 3.0.3 + + unist-util-visit-parents@6.0.2: + dependencies: + '@types/unist': 3.0.3 + unist-util-is: 6.0.1 + + unist-util-visit@5.1.0: + dependencies: + '@types/unist': 3.0.3 + unist-util-is: 6.0.1 + unist-util-visit-parents: 6.0.2 + + uuid@11.1.0: {} + + uuid@13.0.0: {} + + vfile-location@5.0.3: + dependencies: + '@types/unist': 3.0.3 + vfile: 6.0.3 + + vfile-message@4.0.3: + dependencies: + '@types/unist': 3.0.3 + unist-util-stringify-position: 4.0.0 + + vfile@6.0.3: + dependencies: + '@types/unist': 3.0.3 + vfile-message: 4.0.3 + vite@6.4.1(@types/node@24.12.2)(jiti@2.6.1)(lightningcss@1.32.0): dependencies: esbuild: 0.25.12 @@ -1288,4 +4412,30 @@ snapshots: optionalDependencies: vite: 6.4.1(@types/node@24.12.2)(jiti@2.6.1)(lightningcss@1.32.0) + w3c-keyname@2.2.8: {} + + w3c-xmlserializer@5.0.0: + dependencies: + xml-name-validator: 5.0.0 + + web-namespaces@2.0.1: {} + 
+ webidl-conversions@8.0.1: {} + + whatwg-mimetype@5.0.0: {} + + whatwg-url@16.0.1: + dependencies: + '@exodus/bytes': 1.15.0 + tr46: 6.0.0 + webidl-conversions: 8.0.1 + transitivePeerDependencies: + - '@noble/hashes' + + xml-name-validator@5.0.0: {} + + xmlchars@2.2.0: {} + zimmerframe@1.1.4: {} + + zwitch@2.0.4: {} diff --git a/ui/src/App.svelte b/ui/src/App.svelte index 4bad060..eb56ca5 100644 --- a/ui/src/App.svelte +++ b/ui/src/App.svelte @@ -1,5 +1,19 @@ - AMCS + AMCS Admin
-
-
- Avelon Memory Crystal - -
-
-
-
- - Avalon Memory Crystal Server -
-
-

- Avelon Memory Crystal Server (AMCS) -

-

- {data?.description ?? - "AMCS is a memory server that captures, links, and retrieves structured project thoughts for AI assistants using semantic search, summaries, and MCP tools."} -

-
+ {#if !isLoggedIn} +
+
+
+
+ + AMCS Control Interface
- -
- {#each quickLinks as link} - {link.label} - {/each} - {#if data?.oauth_enabled} - OAuth Authorization Server - {/if} -
- -
-
-

- Connected users -

-

- {data?.connected_count ?? "—"} -

-
-
-

- Known principals -

-

- {data?.total_known ?? "—"} -

-
-
-

- Version -

-

- {data?.version ?? "—"} -

-
-
-
- - -
-
- -
-
-
-

Recent access

-

- Authenticated principals AMCS has seen recently. +

Login

+

+ Origin-style operator access for the AMCS admin interface. ResolveSpec OAuth stays the auth brain; + this shell just gives us the front door.

-
- -
- {#if loading} -
- Loading status… -
- {:else if error} -
-

Couldn’t load the status snapshot.

-

{error}

-
- {:else if data && data.entries.length === 0} -
- No authenticated access recorded yet. -
- {:else if data} -
-
- - - - - - - - - - - - {#each data.entries as entry} - - - - - - - - {/each} - -
PrincipalLast accessedLast pathAgentRequests
{entry.key_id}{formatDate(entry.last_accessed_at)}{entry.last_path}{entry.user_agent ?? "—"}{entry.request_count}
+
+
+

Primary module

+

Projects

+

Projects are the first real admin screen in this rollout.

+
+
+

UI direction

+

Origin-like

+

Login and page structure mapped toward Origin patterns.

+
- {/if} -
-
+ +
+

Operator login

+

Authenticate to access AMCS admin pages.

+ +
+ { + handleLogin(value as LoginResult); + }} + onError={(err) => { + authError = String(err || 'Failed to login. Please check your credentials and try again.'); + authMessage = ''; + }} + request="insert" + > + {#snippet children(state)} +
+ + state.setState('values', { ...state.values, username: v })} + /> + + + + state.setState('values', { ...state.values, password: v })} + /> + + + + + {#if authError} +

{authError}

+ {/if} + {#if authMessage} +

{authMessage}

+ {/if} +
+ {/snippet} +
+
+
+ + + {:else} +
+ + +
+ {#if currentPage === 'dashboard'} +
+
+
+

System overview

+

Current AMCS status behind the admin shell.

+
+ +
+ + {#if loading} +
+ Loading status… +
+ {:else if error} +
+

Couldn’t load the status snapshot.

+

{error}

+
+ {:else if data} +
+
+

Connected users

+

{data.connected_count}

+
+
+

Known principals

+

{data.total_known}

+
+
+

Version

+

{data.version}

+
+
+ {/if} +
+ {:else} +
+
+
+

Projects

+

First module scaffold. Grid/Form wiring comes next.

+
+ + Structure phase + +
+ +
+
+

Project grid placeholder

+

+ This is the landing zone for the Origin-style projects grid using Svelix and GridlerFull. + Next pass: wire ResolveSpec-backed project list, row actions, and editor flow. +

+
    +
  • • Project list and search
  • +
  • • Project detail/edit drawer or modal
  • +
  • • Create/archive actions
  • +
  • • Link-outs to related thoughts and skills
  • +
+
+ +
+

Build notes

+
+
+
Auth path
+
ResolveSpec OAuth packages
+
+
+
Page pattern
+
Mapped toward Origin login and shell
+
+
+
First module
+
Projects
+
+
+
+
+
+ {/if} + + {#if data && currentPage === 'dashboard' && data.entries.length > 0} +
+

Recent access

+
+
+ + + + + + + + + + + + {#each data.entries as entry} + + + + + + + + {/each} + +
PrincipalLast accessedLast pathAgentRequests
{entry.key_id}{formatDate(entry.last_accessed_at)}{entry.last_path}{entry.user_agent ?? '—'}{entry.request_count}
+
+
+
+ {/if} +
+
+ {/if}
diff --git a/ui/src/shellState.ts b/ui/src/shellState.ts new file mode 100644 index 0000000..f32d6ac --- /dev/null +++ b/ui/src/shellState.ts @@ -0,0 +1,46 @@ +import { GlobalStateStore } from '@warkypublic/svelix'; + +const normalizeApiURL = (url: string): string => url.replace(/\/+$/, ''); + +const resolveApiURL = (envURL?: string): string => { + const viteEnvURL = + envURL?.trim() || + import.meta.env.VITE_API_URL?.trim() || + import.meta.env.VITE_API_BASE_URL?.trim() || + import.meta.env.VITE_URL?.trim(); + + if (viteEnvURL) return normalizeApiURL(viteEnvURL); + + if (typeof window !== 'undefined') { + return `${window.location.protocol}//${window.location.host}/api`; + } + + const stateURL = GlobalStateStore.getState().session.apiURL?.trim(); + if (stateURL) return normalizeApiURL(stateURL); + + return ''; +}; + +export { GlobalStateStore }; + +export function ensureApiURL(envURL?: string): string { + const resolved = resolveApiURL(envURL); + if (!resolved) return ''; + + const state = GlobalStateStore.getState(); + if (state.session.apiURL !== resolved) { + state.setApiURL(resolved); + } + + return resolved; +} + +export function setCurrentPath(pathname: string): void { + const state = GlobalStateStore.getState(); + const current = state.navigation.currentPage ?? {}; + + state.setCurrentPage({ + ...current, + path: pathname + }); +} From cd14be06667971cb412a035ca207d1ad0b62e79f Mon Sep 17 00:00:00 2001 From: sgcommand Date: Wed, 22 Apr 2026 23:11:37 +0200 Subject: [PATCH 15/15] feat(ui): wire resolvespec oauth login flow --- ui/src/App.svelte | 218 +++++++++++++++++++++++----------------- ui/src/shellState.ts | 230 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 355 insertions(+), 93 deletions(-) diff --git a/ui/src/App.svelte b/ui/src/App.svelte index eb56ca5..520588e 100644 --- a/ui/src/App.svelte +++ b/ui/src/App.svelte @@ -1,19 +1,13 @@