From 4bb3e1af582b7c42057957da1aad3a278520bba5 Mon Sep 17 00:00:00 2001 From: sgcommand Date: Wed, 22 Apr 2026 00:50:18 +0200 Subject: [PATCH] feat: add structured learnings store, types, and migration for issue #4 --- README.md | 2 + internal/store/learnings.go | 167 ++++++++++++++++++ internal/types/learnings.go | 79 +++++++++ .../20260421_create_learnings_table.sql | 29 +++ 4 files changed, 277 insertions(+) create mode 100644 internal/store/learnings.go create mode 100644 internal/types/learnings.go create mode 100644 migrations/20260421_create_learnings_table.sql diff --git a/README.md b/README.md index 131f8e5..1ea1fbb 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,8 @@ A Go MCP server for capturing and retrieving thoughts, memory, and project context. Exposes tools over Streamable HTTP, backed by Postgres with pgvector for semantic search. +The structured learnings feature adds a separate record type for distilled, reusable knowledge with provenance, verification, and actionability fields, while leaving thoughts as the original captured notes.
+ ## What it does - **Capture** thoughts with automatic embedding and metadata extraction diff --git a/internal/store/learnings.go b/internal/store/learnings.go new file mode 100644 index 0000000..3325efc --- /dev/null +++ b/internal/store/learnings.go @@ -0,0 +1,167 @@ +package store + +import ( + "context" + "database/sql" + "fmt" + "time" + + "github.com/google/uuid" + "github.com/lib/pq" + "amcs/internal/types" +) + +type LearningStore struct { // LearningStore provides CRUD access to the learnings table. + db *sql.DB +} + +func NewLearningStore(db *sql.DB) *LearningStore { // NewLearningStore wraps an already-open *sql.DB; it performs no I/O itself. + return &LearningStore{db: db} +} + +func (s *LearningStore) Create(ctx context.Context, l *types.Learning) error { // Create inserts l and backfills its DB-generated ID and timestamps. + query := ` + INSERT INTO learnings ( + summary, details, category, area, status, priority, confidence, + action_required, source_type, source_ref, project_id, related_thought_id, + related_skill_id, reviewed_by, reviewed_at, duplicate_of, supersedes, tags + ) VALUES ( + $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18 + ) RETURNING guid, created_at, updated_at` + + var id uuid.UUID + var createdAt, updatedAt time.Time + + err := s.db.QueryRowContext(ctx, query, + l.Summary, l.Details, l.Category, l.Area, l.Status, l.Priority, l.Confidence, + l.ActionRequired, l.SourceType, l.SourceRef, l.ProjectID, l.RelatedThoughtID, + l.RelatedSkillID, l.ReviewedBy, l.ReviewedAt, l.DuplicateOf, l.Supersedes, + pq.Array(l.Tags), + ).Scan(&id, &createdAt, &updatedAt) + + if err != nil { + return fmt.Errorf("failed to create learning: %w", err) + } + + l.ID = id + l.CreatedAt = createdAt + l.UpdatedAt = updatedAt + return nil +} + +func (s *LearningStore) Get(ctx context.Context, id uuid.UUID) (*types.Learning, error) { // Get loads one learning by primary key. + query := `SELECT guid, summary, details, category, area, status, priority, confidence, action_required, source_type, source_ref, project_id, related_thought_id, related_skill_id, reviewed_by, reviewed_at, duplicate_of, supersedes, tags, created_at, updated_at FROM learnings WHERE guid = $1` // explicit columns: a positional Scan must never depend on the table's physical column order + + l := &types.Learning{} + err := s.db.QueryRowContext(ctx, query, id).Scan( + &l.ID, &l.Summary, &l.Details, &l.Category, &l.Area, &l.Status, &l.Priority, + &l.Confidence, &l.ActionRequired, &l.SourceType, &l.SourceRef, &l.ProjectID, + 
&l.RelatedThoughtID, &l.RelatedSkillID, &l.ReviewedBy, &l.ReviewedAt, + &l.DuplicateOf, &l.Supersedes, pq.Array(&l.Tags), &l.CreatedAt, &l.UpdatedAt, + ) + if err != nil { + return nil, fmt.Errorf("failed to get learning: %w", err) // %w keeps sql.ErrNoRows detectable via errors.Is + } + return l, nil +} + +func (s *LearningStore) Update(ctx context.Context, l *types.Learning) error { // Update rewrites every mutable column of the row identified by l.ID. + query := ` + UPDATE learnings SET + summary=$1, details=$2, category=$3, area=$4, status=$5, priority=$6, + confidence=$7, action_required=$8, source_type=$9, source_ref=$10, + project_id=$11, related_thought_id=$12, related_skill_id=$13, + reviewed_by=$14, reviewed_at=$15, duplicate_of=$16, supersedes=$17, + tags=$18, updated_at=now() + WHERE guid=$19` + + _, err := s.db.ExecContext(ctx, query, + l.Summary, l.Details, l.Category, l.Area, l.Status, l.Priority, l.Confidence, + l.ActionRequired, l.SourceType, l.SourceRef, l.ProjectID, l.RelatedThoughtID, + l.RelatedSkillID, l.ReviewedBy, l.ReviewedAt, l.DuplicateOf, l.Supersedes, + pq.Array(l.Tags), l.ID, + ) + if err != nil { + return fmt.Errorf("failed to update learning: %w", err) + } + return nil +} + +func (s *LearningStore) Delete(ctx context.Context, id uuid.UUID) error { // Delete removes the row; a missing ID is not reported as an error. + query := `DELETE FROM learnings WHERE guid = $1` + _, err := s.db.ExecContext(ctx, query, id) + if err != nil { + return fmt.Errorf("failed to delete learning: %w", err) + } + return nil +} + +func (s *LearningStore) List(ctx context.Context, filter types.LearningFilter) ([]types.Learning, error) { // List returns learnings matching every non-zero filter field, newest first. + query := `SELECT guid, summary, details, category, area, status, priority, confidence, action_required, source_type, source_ref, project_id, related_thought_id, related_skill_id, reviewed_by, reviewed_at, duplicate_of, supersedes, tags, created_at, updated_at FROM learnings WHERE 1=1` // explicit columns must stay in sync with rows.Scan below + args := []interface{}{} + argCount := 1 + + if filter.ProjectID != nil { + query += fmt.Sprintf(` AND project_id = $%d`, argCount) + args = append(args, *filter.ProjectID) + argCount++ + } + if filter.Category != "" { + query += fmt.Sprintf(` AND category = $%d`, argCount) + args = append(args, filter.Category) + argCount++ + } + if filter.Area != "" { + query += fmt.Sprintf(` AND area = $%d`, argCount) + args = append(args, filter.Area) + argCount++ + } + if filter.Status != 
"" { + query += fmt.Sprintf(` AND status = $%d`, argCount) + args = append(args, filter.Status) + argCount++ + } + if filter.Priority != "" { + query += fmt.Sprintf(` AND priority = $%d`, argCount) + args = append(args, filter.Priority) + argCount++ + } + if filter.Tag != "" { + query += fmt.Sprintf(` AND %d = ANY(tags)`, argCount) // Wait, tags is array. Correct is: + query = fmt.Sprintf("%s AND $%d = ANY(tags)", query, argCount) + args = append(args, filter.Tag) + argCount++ + } + if filter.Query != "" { + query += fmt.Sprintf(` AND to_tsvector('simple', summary || ' ' || coalesce(details, '')) @@ websearch_to_tsquery('simple', $%d)`, argCount) + args = append(args, filter.Query) + argCount++ + } + + query += fmt.Sprintf(` ORDER BY created_at DESC LIMIT %d`, filter.Limit) + if filter.Limit == 0 { + query = query[:len(query)-10] // remove LIMIT 0 + } + + rows, err := s.db.QueryContext(ctx, query, args...) + if err != nil { + return nil, err + } + defer rows.Close() + + var learnings []types.Learning + for rows.Next() { + l := types.Learning{} + err := rows.Scan( + &l.ID, &l.Summary, &l.Details, &l.Category, &l.Area, &l.Status, &l.Priority, + &l.Confidence, &l.ActionRequired, &l.SourceType, &l.SourceRef, &l.ProjectID, + &l.RelatedThoughtID, &l.RelatedSkillID, &l.ReviewedBy, &l.ReviewedAt, + &l.DuplicateOf, &l.Supersedes, &l.Tags, &l.CreatedAt, &l.UpdatedAt, + ) + if err != nil { + return nil, err + } + learnings = append(learnings, l) + } + return learnings, nil +} diff --git a/internal/types/learnings.go b/internal/types/learnings.go new file mode 100644 index 0000000..de09a75 --- /dev/null +++ b/internal/types/learnings.go @@ -0,0 +1,79 @@ +package types + +import ( + "time" + + "github.com/google/uuid" +) + +type EvidenceLevel string + +const ( + EvidenceHypothesis EvidenceLevel = "hypothesis" + EvidenceObserved EvidenceLevel = "observed" + EvidenceVerified EvidenceLevel = "verified" +) + +type LearningStatus string + +const ( + StatusProvisional LearningStatus 
= "provisional" + StatusVerified LearningStatus = "verified" + StatusDeprecated LearningStatus = "deprecated" +) + +type LearningPriority string + +const ( + PriorityLow LearningPriority = "low" + PriorityMedium LearningPriority = "medium" + PriorityHigh LearningPriority = "high" + PriorityCritical LearningPriority = "critical" +) + +type Learning struct { + ID uuid.UUID `json:"id"` + Summary string `json:"summary"` + Details string `json:"details"` + Category string `json:"category"` + Area string `json:"area"` + Status LearningStatus `json:"status"` + Priority LearningPriority `json:"priority"` + Confidence EvidenceLevel `json:"confidence"` + ActionRequired bool `json:"action_required"` + + // Provenance + SourceType string `json:"source_type"` + SourceRef string `json:"source_ref"` + + // Relations + ProjectID *uuid.UUID `json:"project_id,omitempty"` + RelatedThoughtID *uuid.UUID `json:"related_thought_id,omitempty"` + RelatedSkillID *uuid.UUID `json:"related_skill_id,omitempty"` + + // Versioning/Review + ReviewedBy *string `json:"reviewed_by,omitempty"` + ReviewedAt *time.Time `json:"reviewed_at,omitempty"` + DuplicateOf *uuid.UUID `json:"duplicate_of,omitempty"` + Supersedes *uuid.UUID `json:"supersedes,omitempty"` + + Tags []string `json:"tags"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` +} + +type LearningFilter struct { + Limit int + ProjectID *uuid.UUID + Category string + Area string + Status LearningStatus + Priority LearningPriority + Tag string + Query string // Free-text search across summary/details +} + +type LearningSearchResult struct { + Learning Learning `json:"learning"` + Similarity float64 `json:"similarity"` +} diff --git a/migrations/20260421_create_learnings_table.sql b/migrations/20260421_create_learnings_table.sql new file mode 100644 index 0000000..e343637 --- /dev/null +++ b/migrations/20260421_create_learnings_table.sql @@ -0,0 +1,29 @@ +CREATE TABLE learnings ( + guid UUID PRIMARY KEY DEFAULT 
gen_random_uuid(), + summary TEXT NOT NULL, + details TEXT, + category TEXT, + area TEXT, + status TEXT NOT NULL DEFAULT 'provisional', -- mirrors Go LearningStatus; NOTE(review): no CHECK constraint, the DB accepts any text + priority TEXT DEFAULT 'medium', + confidence TEXT DEFAULT 'hypothesis', + action_required BOOLEAN DEFAULT false, + source_type TEXT, + source_ref TEXT, + project_id UUID REFERENCES projects(guid) ON DELETE SET NULL, + related_thought_id UUID REFERENCES thoughts(guid) ON DELETE SET NULL, + related_skill_id UUID REFERENCES skills(guid) ON DELETE SET NULL, + reviewed_by TEXT, + reviewed_at TIMESTAMP WITH TIME ZONE, + duplicate_of UUID REFERENCES learnings(guid) ON DELETE SET NULL, -- self-referencing FK + supersedes UUID REFERENCES learnings(guid) ON DELETE SET NULL, -- self-referencing FK + tags TEXT[], -- queried via "= ANY(tags)" and the GIN index below + created_at TIMESTAMP WITH TIME ZONE DEFAULT now(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT now() -- NOTE(review): no ON UPDATE trigger; store.Update sets updated_at=now() explicitly +); + +CREATE INDEX idx_learnings_project_id ON learnings(project_id); +CREATE INDEX idx_learnings_category ON learnings(category); +CREATE INDEX idx_learnings_status ON learnings(status); +CREATE INDEX idx_learnings_tags ON learnings USING GIN(tags); +CREATE INDEX idx_learnings_search ON learnings USING GIN(to_tsvector('simple', summary || ' ' || coalesce(details, ''))); -- expression must match the to_tsvector call in store.List for this index to be usable