feat(files): implement file storage functionality with save, load, and list operations

This commit is contained in:
2026-03-30 22:24:18 +02:00
parent 79d8219836
commit 7f2b2b9fee
12 changed files with 676 additions and 33 deletions

View File

@@ -129,19 +129,20 @@ func routes(logger *slog.Logger, cfg *config.Config, db *store.DB, provider ai.P
mux := http.NewServeMux()
toolSet := mcpserver.ToolSet{
Capture: tools.NewCaptureTool(db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, logger),
Search: tools.NewSearchTool(db, provider, cfg.Search, activeProjects),
List: tools.NewListTool(db, cfg.Search, activeProjects),
Stats: tools.NewStatsTool(db),
Get: tools.NewGetTool(db),
Update: tools.NewUpdateTool(db, provider, cfg.Capture, logger),
Delete: tools.NewDeleteTool(db),
Archive: tools.NewArchiveTool(db),
Projects: tools.NewProjectsTool(db, activeProjects),
Context: tools.NewContextTool(db, provider, cfg.Search, activeProjects),
Recall: tools.NewRecallTool(db, provider, cfg.Search, activeProjects),
Summarize: tools.NewSummarizeTool(db, provider, cfg.Search, activeProjects),
Links: tools.NewLinksTool(db, provider, cfg.Search),
Capture: tools.NewCaptureTool(db, provider, cfg.Capture, cfg.AI.Metadata.Timeout, activeProjects, logger),
Search: tools.NewSearchTool(db, provider, cfg.Search, activeProjects),
List: tools.NewListTool(db, cfg.Search, activeProjects),
Stats: tools.NewStatsTool(db),
Get: tools.NewGetTool(db),
Update: tools.NewUpdateTool(db, provider, cfg.Capture, logger),
Delete: tools.NewDeleteTool(db),
Archive: tools.NewArchiveTool(db),
Projects: tools.NewProjectsTool(db, activeProjects),
Context: tools.NewContextTool(db, provider, cfg.Search, activeProjects),
Recall: tools.NewRecallTool(db, provider, cfg.Search, activeProjects),
Summarize: tools.NewSummarizeTool(db, provider, cfg.Search, activeProjects),
Links: tools.NewLinksTool(db, provider, cfg.Search),
Files: tools.NewFilesTool(db, activeProjects),
Backfill: tools.NewBackfillTool(db, provider, activeProjects, logger),
Reparse: tools.NewReparseMetadataTool(db, provider, cfg.Capture, activeProjects, logger),
Household: tools.NewHouseholdTool(db),

View File

@@ -11,19 +11,20 @@ import (
)
type ToolSet struct {
Capture *tools.CaptureTool
Search *tools.SearchTool
List *tools.ListTool
Stats *tools.StatsTool
Get *tools.GetTool
Update *tools.UpdateTool
Delete *tools.DeleteTool
Archive *tools.ArchiveTool
Projects *tools.ProjectsTool
Context *tools.ContextTool
Recall *tools.RecallTool
Summarize *tools.SummarizeTool
Links *tools.LinksTool
Capture *tools.CaptureTool
Search *tools.SearchTool
List *tools.ListTool
Stats *tools.StatsTool
Get *tools.GetTool
Update *tools.UpdateTool
Delete *tools.DeleteTool
Archive *tools.ArchiveTool
Projects *tools.ProjectsTool
Context *tools.ContextTool
Recall *tools.RecallTool
Summarize *tools.SummarizeTool
Links *tools.LinksTool
Files *tools.FilesTool
Backfill *tools.BackfillTool
Reparse *tools.ReparseMetadataTool
Household *tools.HouseholdTool
@@ -124,6 +125,21 @@ func New(cfg config.MCPConfig, toolSet ToolSet) http.Handler {
Description: "Retrieve explicit links and semantic neighbors for a thought.",
}, toolSet.Links.Related)
addTool(server, &mcp.Tool{
Name: "save_file",
Description: "Store a base64-encoded file such as an image, document, or audio clip, optionally linking it to a thought.",
}, toolSet.Files.Save)
addTool(server, &mcp.Tool{
Name: "load_file",
Description: "Load a previously stored file by id and return its metadata and base64 content.",
}, toolSet.Files.Load)
addTool(server, &mcp.Tool{
Name: "list_files",
Description: "List stored files, optionally filtered by thought, project, or kind.",
}, toolSet.Files.List)
addTool(server, &mcp.Tool{
Name: "backfill_embeddings",
Description: "Generate missing embeddings for stored thoughts using the active embedding model.",

View File

@@ -35,6 +35,7 @@ func Fallback(capture config.CaptureConfig) thoughttypes.ThoughtMetadata {
Topics: []string{topicFallback},
Type: normalizeType(capture.MetadataDefaults.Type),
Source: normalizeSource(capture.Source),
Attachments: []thoughttypes.ThoughtAttachment{},
}
}
@@ -46,6 +47,7 @@ func Normalize(in thoughttypes.ThoughtMetadata, capture config.CaptureConfig) th
Topics: normalizeList(in.Topics, maxTopics),
Type: normalizeType(in.Type),
Source: normalizeSource(in.Source),
Attachments: normalizeAttachments(in.Attachments),
}
if len(out.Topics) == 0 {
@@ -127,10 +129,42 @@ func Merge(base, patch thoughttypes.ThoughtMetadata, capture config.CaptureConfi
if strings.TrimSpace(patch.Source) != "" {
merged.Source = patch.Source
}
if len(patch.Attachments) > 0 {
merged.Attachments = append(append([]thoughttypes.ThoughtAttachment{}, merged.Attachments...), patch.Attachments...)
}
return Normalize(merged, capture)
}
// zeroUUIDString is the canonical textual form of the nil UUID; an attachment
// carrying it has no real file reference and is dropped.
const zeroUUIDString = "00000000-0000-0000-0000-000000000000"

// normalizeAttachments sanitizes a thought's attachment list: it drops entries
// whose FileID is the nil UUID, de-duplicates by FileID (first occurrence
// wins), trims whitespace from the textual fields, and clamps negative sizes
// to zero. The input slice is not mutated; a fresh slice is returned.
func normalizeAttachments(values []thoughttypes.ThoughtAttachment) []thoughttypes.ThoughtAttachment {
	seen := make(map[string]struct{}, len(values))
	result := make([]thoughttypes.ThoughtAttachment, 0, len(values))
	for _, value := range values {
		// uuid.UUID.String() never returns ""; the only invalid id is the
		// zero-value UUID, so compute the key once and test just that.
		key := value.FileID.String()
		if key == zeroUUIDString {
			continue
		}
		if _, ok := seen[key]; ok {
			continue
		}
		value.Name = strings.TrimSpace(value.Name)
		value.MediaType = strings.TrimSpace(value.MediaType)
		value.Kind = strings.TrimSpace(value.Kind)
		if value.SizeBytes < 0 {
			value.SizeBytes = 0
		}
		value.SHA256 = strings.TrimSpace(value.SHA256)
		seen[key] = struct{}{}
		result = append(result, value)
	}
	return result
}
func SortedTopCounts(in map[string]int, limit int) []thoughttypes.KeyCount {
out := make([]thoughttypes.KeyCount, 0, len(in))
for key, count := range in {

View File

@@ -4,6 +4,8 @@ import (
"strings"
"testing"
"github.com/google/uuid"
"git.warky.dev/wdevs/amcs/internal/config"
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
)
@@ -79,3 +81,24 @@ func TestMergeAddsPatchAndNormalizes(t *testing.T) {
t.Fatalf("Topics len = %d, want 2", len(got.Topics))
}
}
// TestNormalizeDedupesAttachmentsByFileID checks that Normalize keeps only the
// first attachment for a given FileID and trims surrounding whitespace from
// that attachment's textual fields (Name, MediaType, Kind, SHA256).
func TestNormalizeDedupesAttachmentsByFileID(t *testing.T) {
	id := uuid.New()
	got := Normalize(thoughttypes.ThoughtMetadata{
		Attachments: []thoughttypes.ThoughtAttachment{
			// Same FileID twice: the second entry must be dropped entirely.
			{FileID: id, Name: " one.png ", MediaType: " image/png ", Kind: " image ", SizeBytes: 12, SHA256: " abc "},
			{FileID: id, Name: "two.png", MediaType: "image/png", Kind: "image", SizeBytes: 99, SHA256: "def"},
		},
	}, testCaptureConfig())
	if len(got.Attachments) != 1 {
		t.Fatalf("Attachments len = %d, want 1", len(got.Attachments))
	}
	// First occurrence wins, and its fields come back whitespace-trimmed.
	if got.Attachments[0].Name != "one.png" {
		t.Fatalf("Attachment name = %q, want one.png", got.Attachments[0].Name)
	}
	if got.Attachments[0].Kind != "image" {
		t.Fatalf("Attachment kind = %q, want image", got.Attachments[0].Kind)
	}
}

View File

@@ -92,5 +92,13 @@ func (db *DB) VerifyRequirements(ctx context.Context) error {
return fmt.Errorf("embeddings table is missing — run migrations")
}
var hasStoredFiles bool
if err := db.pool.QueryRow(ctx, `select exists(select 1 from pg_tables where schemaname = 'public' and tablename = 'stored_files')`).Scan(&hasStoredFiles); err != nil {
return fmt.Errorf("verify stored_files table: %w", err)
}
if !hasStoredFiles {
return fmt.Errorf("stored_files table is missing — run migrations")
}
return nil
}

191
internal/store/files.go Normal file
View File

@@ -0,0 +1,191 @@
package store
import (
"context"
"encoding/json"
"fmt"
"strings"
"github.com/google/uuid"
"github.com/jackc/pgx/v5"
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
)
// InsertStoredFile persists a new row in stored_files and returns the created
// record as the database sees it, including the server-generated guid and
// timestamps. The RETURNING clause does not read the content column back, so
// the returned StoredFile's Content field stays nil.
func (db *DB) InsertStoredFile(ctx context.Context, file thoughttypes.StoredFile) (thoughttypes.StoredFile, error) {
	row := db.pool.QueryRow(ctx, `
insert into stored_files (thought_id, project_id, name, media_type, kind, encoding, size_bytes, sha256, content)
values ($1, $2, $3, $4, $5, $6, $7, $8, $9)
returning guid, thought_id, project_id, name, media_type, kind, encoding, size_bytes, sha256, created_at, updated_at
`, file.ThoughtID, file.ProjectID, file.Name, file.MediaType, file.Kind, file.Encoding, file.SizeBytes, file.SHA256, file.Content)
	var created thoughttypes.StoredFile
	// Scan order must mirror the RETURNING column list above.
	if err := row.Scan(
		&created.ID,
		&created.ThoughtID,
		&created.ProjectID,
		&created.Name,
		&created.MediaType,
		&created.Kind,
		&created.Encoding,
		&created.SizeBytes,
		&created.SHA256,
		&created.CreatedAt,
		&created.UpdatedAt,
	); err != nil {
		return thoughttypes.StoredFile{}, fmt.Errorf("insert stored file: %w", err)
	}
	return created, nil
}
// GetStoredFile loads a single stored file row — including its raw content —
// by guid. pgx.ErrNoRows is passed through unwrapped so callers can detect
// "not found"; any other failure is wrapped with context.
func (db *DB) GetStoredFile(ctx context.Context, id uuid.UUID) (thoughttypes.StoredFile, error) {
	row := db.pool.QueryRow(ctx, `
select guid, thought_id, project_id, name, media_type, kind, encoding, size_bytes, sha256, content, created_at, updated_at
from stored_files
where guid = $1
`, id)
	var file thoughttypes.StoredFile
	// Scan order mirrors the select column list above.
	if err := row.Scan(
		&file.ID,
		&file.ThoughtID,
		&file.ProjectID,
		&file.Name,
		&file.MediaType,
		&file.Kind,
		&file.Encoding,
		&file.SizeBytes,
		&file.SHA256,
		&file.Content,
		&file.CreatedAt,
		&file.UpdatedAt,
	); err != nil {
		// Return the sentinel as-is; wrapping would hide it from == checks.
		if err == pgx.ErrNoRows {
			return thoughttypes.StoredFile{}, err
		}
		return thoughttypes.StoredFile{}, fmt.Errorf("get stored file: %w", err)
	}
	return file, nil
}
// ListStoredFiles returns stored file metadata (content column excluded)
// matching the filter, newest first, capped at filter.Limit rows. Nil or
// empty filter fields are skipped; the rest are ANDed together. Callers are
// expected to pass a positive Limit — a zero Limit produces "limit 0" and
// therefore an empty result.
func (db *DB) ListStoredFiles(ctx context.Context, filter thoughttypes.StoredFileFilter) ([]thoughttypes.StoredFile, error) {
	args := make([]any, 0, 4)
	conditions := make([]string, 0, 3)
	// Each append of an arg is immediately followed by a condition using
	// len(args) as the 1-based positional placeholder index, keeping the two
	// slices in lockstep.
	if filter.ThoughtID != nil {
		args = append(args, *filter.ThoughtID)
		conditions = append(conditions, fmt.Sprintf("thought_id = $%d", len(args)))
	}
	if filter.ProjectID != nil {
		args = append(args, *filter.ProjectID)
		conditions = append(conditions, fmt.Sprintf("project_id = $%d", len(args)))
	}
	if kind := strings.TrimSpace(filter.Kind); kind != "" {
		args = append(args, kind)
		conditions = append(conditions, fmt.Sprintf("kind = $%d", len(args)))
	}
	query := `
select guid, thought_id, project_id, name, media_type, kind, encoding, size_bytes, sha256, created_at, updated_at
from stored_files
`
	if len(conditions) > 0 {
		query += " where " + strings.Join(conditions, " and ")
	}
	// The limit is always the final placeholder.
	args = append(args, filter.Limit)
	query += fmt.Sprintf(" order by created_at desc limit $%d", len(args))
	rows, err := db.pool.Query(ctx, query, args...)
	if err != nil {
		return nil, fmt.Errorf("list stored files: %w", err)
	}
	defer rows.Close()
	files := make([]thoughttypes.StoredFile, 0, filter.Limit)
	for rows.Next() {
		var file thoughttypes.StoredFile
		// Scan order mirrors the select column list; Content is not selected
		// and stays nil on every element.
		if err := rows.Scan(
			&file.ID,
			&file.ThoughtID,
			&file.ProjectID,
			&file.Name,
			&file.MediaType,
			&file.Kind,
			&file.Encoding,
			&file.SizeBytes,
			&file.SHA256,
			&file.CreatedAt,
			&file.UpdatedAt,
		); err != nil {
			return nil, fmt.Errorf("scan stored file: %w", err)
		}
		files = append(files, file)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("iterate stored files: %w", err)
	}
	return files, nil
}
// AddThoughtAttachment upserts an attachment entry in the given thought's
// metadata JSON. The thought row is locked with SELECT ... FOR UPDATE inside a
// transaction so concurrent attachment writes serialize; an existing entry
// with the same FileID is replaced in place, otherwise the attachment is
// appended. Returns pgx.ErrNoRows unchanged when the thought does not exist.
func (db *DB) AddThoughtAttachment(ctx context.Context, thoughtID uuid.UUID, attachment thoughttypes.ThoughtAttachment) error {
	tx, err := db.pool.Begin(ctx)
	if err != nil {
		return fmt.Errorf("begin transaction: %w", err)
	}
	// Rollback after a successful Commit is a no-op; this guards every early
	// return below.
	defer tx.Rollback(ctx)
	var metadataBytes []byte
	// FOR UPDATE holds a row lock until commit so the read-modify-write of the
	// metadata JSON cannot race another writer.
	if err := tx.QueryRow(ctx, `select metadata from thoughts where guid = $1 for update`, thoughtID).Scan(&metadataBytes); err != nil {
		// Propagate the sentinel unwrapped so callers can detect "not found".
		if err == pgx.ErrNoRows {
			return err
		}
		return fmt.Errorf("load thought metadata: %w", err)
	}
	var metadata thoughttypes.ThoughtMetadata
	// A NULL/empty metadata column decodes as the zero value.
	if len(metadataBytes) > 0 {
		if err := json.Unmarshal(metadataBytes, &metadata); err != nil {
			return fmt.Errorf("decode thought metadata: %w", err)
		}
	}
	// Upsert semantics: overwrite the entry with the same FileID if present.
	replaced := false
	for i := range metadata.Attachments {
		if metadata.Attachments[i].FileID == attachment.FileID {
			metadata.Attachments[i] = attachment
			replaced = true
			break
		}
	}
	if !replaced {
		metadata.Attachments = append(metadata.Attachments, attachment)
	}
	updatedMetadata, err := json.Marshal(metadata)
	if err != nil {
		return fmt.Errorf("encode thought metadata: %w", err)
	}
	tag, err := tx.Exec(ctx, `
update thoughts
set metadata = $2::jsonb,
updated_at = now()
where guid = $1
`, thoughtID, updatedMetadata)
	if err != nil {
		return fmt.Errorf("update thought attachments: %w", err)
	}
	// The row vanished between the locked read and the update (should not
	// happen under the lock, but treat it as not-found for safety).
	if tag.RowsAffected() == 0 {
		return pgx.ErrNoRows
	}
	if err := tx.Commit(ctx); err != nil {
		return fmt.Errorf("commit attachment update: %w", err)
	}
	return nil
}

276
internal/tools/files.go Normal file
View File

@@ -0,0 +1,276 @@
package tools
import (
"context"
"crypto/sha256"
"encoding/base64"
"encoding/hex"
"net/http"
"strings"
"github.com/google/uuid"
"github.com/modelcontextprotocol/go-sdk/mcp"
"git.warky.dev/wdevs/amcs/internal/session"
"git.warky.dev/wdevs/amcs/internal/store"
thoughttypes "git.warky.dev/wdevs/amcs/internal/types"
)
// FilesTool implements the save_file, load_file, and list_files MCP tools on
// top of the stored_files table, scoping operations by the caller's active
// project session where applicable.
type FilesTool struct {
	store    *store.DB
	sessions *session.ActiveProjects
}

// SaveFileInput is the argument payload for the save_file tool.
type SaveFileInput struct {
	Name          string `json:"name" jsonschema:"file name including extension, for example photo.png or note.pdf"`
	ContentBase64 string `json:"content_base64" jsonschema:"file contents encoded as base64"`
	MediaType     string `json:"media_type,omitempty" jsonschema:"optional MIME type such as image/png, application/pdf, or audio/mpeg"`
	Kind          string `json:"kind,omitempty" jsonschema:"optional logical type such as image, document, audio, or file"`
	ThoughtID     string `json:"thought_id,omitempty" jsonschema:"optional thought id to link this file to"`
	Project       string `json:"project,omitempty" jsonschema:"optional project name or id when saving outside a linked thought"`
}

// SaveFileOutput returns the stored file's metadata after a save_file call.
type SaveFileOutput struct {
	File thoughttypes.StoredFile `json:"file"`
}

// LoadFileInput is the argument payload for the load_file tool.
type LoadFileInput struct {
	ID string `json:"id" jsonschema:"the stored file id"`
}

// LoadFileOutput carries a loaded file's metadata plus its content re-encoded
// as standard base64.
type LoadFileOutput struct {
	File          thoughttypes.StoredFile `json:"file"`
	ContentBase64 string                  `json:"content_base64"`
}

// ListFilesInput is the argument payload for the list_files tool.
type ListFilesInput struct {
	Limit     int    `json:"limit,omitempty" jsonschema:"maximum number of files to return"`
	ThoughtID string `json:"thought_id,omitempty" jsonschema:"optional thought id to list files for"`
	Project   string `json:"project,omitempty" jsonschema:"optional project name or id to scope the listing"`
	Kind      string `json:"kind,omitempty" jsonschema:"optional kind filter such as image, document, audio, or file"`
}

// ListFilesOutput holds the matching files' metadata (content excluded).
type ListFilesOutput struct {
	Files []thoughttypes.StoredFile `json:"files"`
}

// NewFilesTool wires a FilesTool to the database and active-project sessions.
func NewFilesTool(db *store.DB, sessions *session.ActiveProjects) *FilesTool {
	return &FilesTool{store: db, sessions: sessions}
}
// Save validates and stores a base64-encoded file, optionally linking it to an
// existing thought. When a thought id is given, the file inherits that
// thought's project and an attachment entry is recorded in the thought's
// metadata. Invalid input yields errInvalidInput-style errors.
func (t *FilesTool) Save(ctx context.Context, req *mcp.CallToolRequest, in SaveFileInput) (*mcp.CallToolResult, SaveFileOutput, error) {
	name := strings.TrimSpace(in.Name)
	if name == "" {
		return nil, SaveFileOutput{}, errInvalidInput("name is required")
	}
	// Accept both bare base64 and data: URLs; a data URL may also carry the
	// MIME type, captured here for later use.
	contentBase64, mediaTypeFromDataURL := splitDataURL(strings.TrimSpace(in.ContentBase64))
	if contentBase64 == "" {
		return nil, SaveFileOutput{}, errInvalidInput("content_base64 is required")
	}
	content, err := decodeBase64(contentBase64)
	if err != nil {
		return nil, SaveFileOutput{}, errInvalidInput("content_base64 must be valid base64")
	}
	if len(content) == 0 {
		return nil, SaveFileOutput{}, errInvalidInput("decoded file content must not be empty")
	}
	// Resolve the project from explicit input or the active session.
	// NOTE(review): the trailing false presumably means "project not
	// required" — confirm against resolveProject.
	project, err := resolveProject(ctx, t.store, t.sessions, req, in.Project, false)
	if err != nil {
		return nil, SaveFileOutput{}, err
	}
	var thoughtID *uuid.UUID
	var projectID = projectIDPtr(project)
	if rawThoughtID := strings.TrimSpace(in.ThoughtID); rawThoughtID != "" {
		parsedThoughtID, err := parseUUID(rawThoughtID)
		if err != nil {
			return nil, SaveFileOutput{}, err
		}
		thought, err := t.store.GetThought(ctx, parsedThoughtID)
		if err != nil {
			return nil, SaveFileOutput{}, err
		}
		thoughtID = &parsedThoughtID
		// A linked file always belongs to the thought's project, overriding
		// any session-resolved project.
		projectID = thought.ProjectID
		if project != nil && thought.ProjectID != nil && *thought.ProjectID != project.ID {
			return nil, SaveFileOutput{}, errInvalidInput("project does not match the linked thought's project")
		}
	}
	// Precedence: explicit MIME type, then the data-URL header, then sniffing.
	mediaType := normalizeMediaType(strings.TrimSpace(in.MediaType), mediaTypeFromDataURL, content)
	kind := normalizeFileKind(strings.TrimSpace(in.Kind), mediaType)
	sum := sha256.Sum256(content)
	file := thoughttypes.StoredFile{
		Name:      name,
		MediaType: mediaType,
		Kind:      kind,
		Encoding:  "base64",
		SizeBytes: int64(len(content)),
		SHA256:    hex.EncodeToString(sum[:]),
		Content:   content,
		ProjectID: projectID,
	}
	if thoughtID != nil {
		file.ThoughtID = thoughtID
	}
	created, err := t.store.InsertStoredFile(ctx, file)
	if err != nil {
		return nil, SaveFileOutput{}, err
	}
	// Record the attachment on the linked thought's metadata.
	if created.ThoughtID != nil {
		if err := t.store.AddThoughtAttachment(ctx, *created.ThoughtID, thoughtAttachmentFromFile(created)); err != nil {
			return nil, SaveFileOutput{}, err
		}
	}
	// Best effort — presumably bumps the project's activity marker; failure
	// is deliberately ignored.
	if created.ProjectID != nil {
		_ = t.store.TouchProject(ctx, *created.ProjectID)
	}
	return nil, SaveFileOutput{File: created}, nil
}
// Load fetches a stored file by id and returns its metadata together with the
// raw content re-encoded as standard base64.
func (t *FilesTool) Load(ctx context.Context, _ *mcp.CallToolRequest, in LoadFileInput) (*mcp.CallToolResult, LoadFileOutput, error) {
	fileID, parseErr := parseUUID(in.ID)
	if parseErr != nil {
		return nil, LoadFileOutput{}, parseErr
	}
	stored, loadErr := t.store.GetStoredFile(ctx, fileID)
	if loadErr != nil {
		return nil, LoadFileOutput{}, loadErr
	}
	out := LoadFileOutput{File: stored}
	out.ContentBase64 = base64.StdEncoding.EncodeToString(stored.Content)
	return nil, out, nil
}
// List returns stored file metadata scoped by the active or supplied project,
// an optional thought, and an optional kind filter. The limit is normalized
// by normalizeFileLimit before querying.
func (t *FilesTool) List(ctx context.Context, req *mcp.CallToolRequest, in ListFilesInput) (*mcp.CallToolResult, ListFilesOutput, error) {
	project, err := resolveProject(ctx, t.store, t.sessions, req, in.Project, false)
	if err != nil {
		return nil, ListFilesOutput{}, err
	}
	var thoughtID *uuid.UUID
	if rawThoughtID := strings.TrimSpace(in.ThoughtID); rawThoughtID != "" {
		parsedThoughtID, err := parseUUID(rawThoughtID)
		if err != nil {
			return nil, ListFilesOutput{}, err
		}
		thought, err := t.store.GetThought(ctx, parsedThoughtID)
		if err != nil {
			return nil, ListFilesOutput{}, err
		}
		thoughtID = &parsedThoughtID
		// Reject a mismatch between the scoped project and the thought's own.
		if project != nil && thought.ProjectID != nil && *thought.ProjectID != project.ID {
			return nil, ListFilesOutput{}, errInvalidInput("project does not match the linked thought's project")
		}
		// No project in scope: inherit the thought's project for filtering.
		// Only the ID is populated on this synthetic value.
		if project == nil && thought.ProjectID != nil {
			project = &thoughttypes.Project{ID: *thought.ProjectID}
		}
	}
	files, err := t.store.ListStoredFiles(ctx, thoughttypes.StoredFileFilter{
		Limit:     normalizeFileLimit(in.Limit),
		ThoughtID: thoughtID,
		ProjectID: projectIDPtr(project),
		Kind:      strings.TrimSpace(in.Kind),
	})
	if err != nil {
		return nil, ListFilesOutput{}, err
	}
	// Best effort — presumably refreshes the project's activity marker;
	// failure is deliberately ignored.
	if project != nil {
		_ = t.store.TouchProject(ctx, project.ID)
	}
	return nil, ListFilesOutput{Files: files}, nil
}
// thoughtAttachmentFromFile projects a stored file's metadata into the
// attachment record embedded in thought metadata; raw content is omitted.
func thoughtAttachmentFromFile(file thoughttypes.StoredFile) thoughttypes.ThoughtAttachment {
	var attachment thoughttypes.ThoughtAttachment
	attachment.FileID = file.ID
	attachment.Name = file.Name
	attachment.MediaType = file.MediaType
	attachment.Kind = file.Kind
	attachment.SizeBytes = file.SizeBytes
	attachment.SHA256 = file.SHA256
	return attachment
}
// splitDataURL separates a "data:<media-type>;base64,<payload>" value into
// its payload and media type. Any value that is not a base64 data URL is
// returned unchanged with an empty media type.
func splitDataURL(value string) (contentBase64 string, mediaType string) {
	if !strings.HasPrefix(value, "data:") {
		return value, ""
	}
	idx := strings.Index(value, ";base64,")
	if idx < 0 {
		return value, ""
	}
	header := strings.TrimPrefix(value[:idx], "data:")
	return value[idx+len(";base64,"):], strings.TrimSpace(header)
}
// decodeBase64 decodes base64 content accepting both the standard and the
// URL-safe alphabet, each with or without padding. Clients (data URLs in
// particular) emit all four variants, so each encoding is tried in turn; when
// none succeeds the error from the final attempt is returned.
func decodeBase64(value string) ([]byte, error) {
	encodings := []*base64.Encoding{
		base64.StdEncoding,
		base64.RawStdEncoding,
		base64.URLEncoding,
		base64.RawURLEncoding,
	}
	var lastErr error
	for _, encoding := range encodings {
		decoded, err := encoding.DecodeString(value)
		if err == nil {
			return decoded, nil
		}
		lastErr = err
	}
	return nil, lastErr
}
// normalizeMediaType picks the most authoritative MIME type available: the
// caller-supplied value wins, then the data-URL header, and finally content
// sniffing via http.DetectContentType.
func normalizeMediaType(explicit string, fromDataURL string, content []byte) string {
	if explicit != "" {
		return explicit
	}
	if fromDataURL != "" {
		return fromDataURL
	}
	return http.DetectContentType(content)
}
// normalizeFileKind maps a MIME type onto the tool's coarse kind taxonomy
// (image, audio, video, document, file) unless the caller supplied an
// explicit kind, which always wins.
func normalizeFileKind(explicit string, mediaType string) string {
	if explicit != "" {
		return explicit
	}
	if strings.HasPrefix(mediaType, "image/") {
		return "image"
	}
	if strings.HasPrefix(mediaType, "audio/") {
		return "audio"
	}
	if strings.HasPrefix(mediaType, "video/") {
		return "video"
	}
	if mediaType == "application/pdf" || strings.HasPrefix(mediaType, "text/") || strings.Contains(mediaType, "document") {
		return "document"
	}
	return "file"
}
// projectIDPtr extracts a project's ID as a nullable pointer; a nil project
// yields nil so the value can feed optional filters directly.
func projectIDPtr(project *thoughttypes.Project) *uuid.UUID {
	if project != nil {
		return &project.ID
	}
	return nil
}
// normalizeFileLimit clamps a requested page size into [1, 100], substituting
// the default of 20 when the caller passed zero or a negative value.
func normalizeFileLimit(limit int) int {
	if limit <= 0 {
		return 20
	}
	if limit > 100 {
		return 100
	}
	return limit
}

View File

@@ -66,6 +66,7 @@ func (t *UpdateTool) Handle(ctx context.Context, _ *mcp.CallToolRequest, in Upda
t.log.Warn("metadata extraction failed during update, keeping current metadata", slog.String("error", extractErr.Error()))
} else {
mergedMetadata = metadata.Normalize(extracted, t.capture)
mergedMetadata.Attachments = current.Metadata.Attachments
}
}

View File

@@ -7,12 +7,44 @@ import (
)
type ThoughtMetadata struct {
People []string `json:"people"`
ActionItems []string `json:"action_items"`
DatesMentioned []string `json:"dates_mentioned"`
Topics []string `json:"topics"`
Type string `json:"type"`
Source string `json:"source"`
People []string `json:"people"`
ActionItems []string `json:"action_items"`
DatesMentioned []string `json:"dates_mentioned"`
Topics []string `json:"topics"`
Type string `json:"type"`
Source string `json:"source"`
Attachments []ThoughtAttachment `json:"attachments,omitempty"`
}
// ThoughtAttachment is the compact reference to a stored file that is embedded
// in a thought's metadata JSON; it mirrors StoredFile's descriptive fields but
// never carries file content.
type ThoughtAttachment struct {
	FileID    uuid.UUID `json:"file_id"` // guid of the stored_files row
	Name      string    `json:"name"`
	MediaType string    `json:"media_type"`
	Kind      string    `json:"kind,omitempty"` // e.g. image, audio, video, document, file
	SizeBytes int64     `json:"size_bytes"`
	SHA256    string    `json:"sha256,omitempty"` // hex digest of the decoded content
}

// StoredFile is a row of the stored_files table. Content holds the decoded
// bytes and is excluded from JSON serialization; list queries do not populate
// it.
type StoredFile struct {
	ID        uuid.UUID  `json:"id"`
	ThoughtID *uuid.UUID `json:"thought_id,omitempty"` // optional linked thought
	ProjectID *uuid.UUID `json:"project_id,omitempty"` // optional owning project
	Name      string     `json:"name"`
	MediaType string     `json:"media_type"`
	Kind      string     `json:"kind"`
	Encoding  string     `json:"encoding"` // wire encoding label, e.g. "base64"
	SizeBytes int64      `json:"size_bytes"`
	SHA256    string     `json:"sha256"`
	Content   []byte     `json:"-"` // raw bytes; never serialized
	CreatedAt time.Time  `json:"created_at"`
	UpdatedAt time.Time  `json:"updated_at"`
}

// StoredFileFilter narrows ListStoredFiles queries. Nil or empty fields are
// ignored; populated fields are combined with AND. Limit caps the result set
// and should be positive.
type StoredFileFilter struct {
	Limit     int
	ThoughtID *uuid.UUID
	ProjectID *uuid.UUID
	Kind      string
}
type Thought struct {