package app

import (
	"context"
	"errors"
	"fmt"
	"log/slog"
	"net/http"
	"time"

	"git.warky.dev/wdevs/amcs/internal/ai"
	"git.warky.dev/wdevs/amcs/internal/auth"
	"git.warky.dev/wdevs/amcs/internal/config"
	"git.warky.dev/wdevs/amcs/internal/mcpserver"
	"git.warky.dev/wdevs/amcs/internal/observability"
	"git.warky.dev/wdevs/amcs/internal/session"
	"git.warky.dev/wdevs/amcs/internal/store"
	"git.warky.dev/wdevs/amcs/internal/tools"
)

// shutdownTimeout bounds how long in-flight requests may take to drain
// once the run context is cancelled.
const shutdownTimeout = 10 * time.Second

// Run is the application entry point. It loads configuration from
// configPath, wires up the logger, database, AI provider, and auth
// keyring, then serves HTTP until ctx is cancelled or the server fails.
// On cancellation the server is drained gracefully within shutdownTimeout.
// All errors are wrapped with the step that produced them.
func Run(ctx context.Context, configPath string) error {
	cfg, loadedFrom, err := config.Load(configPath)
	if err != nil {
		return fmt.Errorf("load config: %w", err)
	}

	logger, err := observability.NewLogger(cfg.Logging)
	if err != nil {
		return fmt.Errorf("create logger: %w", err)
	}
	logger.Info("loaded configuration",
		slog.String("path", loadedFrom),
		slog.String("provider", cfg.AI.Provider),
	)

	db, err := store.New(ctx, cfg.Database)
	if err != nil {
		return fmt.Errorf("open database: %w", err)
	}
	defer db.Close()

	if err := db.VerifyRequirements(ctx); err != nil {
		return fmt.Errorf("verify database requirements: %w", err)
	}

	// One shared client with a hard timeout so provider calls cannot
	// hang indefinitely.
	httpClient := &http.Client{Timeout: 30 * time.Second}
	provider, err := ai.NewProvider(cfg.AI, httpClient, logger)
	if err != nil {
		return fmt.Errorf("create AI provider: %w", err)
	}

	keyring, err := auth.NewKeyring(cfg.Auth.Keys)
	if err != nil {
		return fmt.Errorf("create keyring: %w", err)
	}

	activeProjects := session.NewActiveProjects()

	logger.Info("database connection verified",
		slog.String("provider", provider.Name()),
	)

	server := &http.Server{
		Addr:         fmt.Sprintf("%s:%d", cfg.Server.Host, cfg.Server.Port),
		Handler:      routes(logger, cfg, db, provider, keyring, activeProjects),
		ReadTimeout:  cfg.Server.ReadTimeout,
		WriteTimeout: cfg.Server.WriteTimeout,
		IdleTimeout:  cfg.Server.IdleTimeout,
	}

	// Buffered so the serve goroutine can always deliver its error and
	// exit, even if we are already in the shutdown branch and never read.
	errCh := make(chan error, 1)
	go func() {
		logger.Info("starting HTTP server",
			slog.String("addr", server.Addr),
			slog.String("mcp_path", cfg.MCP.Path),
		)
		// ErrServerClosed is the expected result of a graceful Shutdown,
		// not a failure.
		if err := server.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) {
			errCh <- err
		}
	}()

	select {
	case <-ctx.Done():
		shutdownCtx, cancel := context.WithTimeout(context.Background(), shutdownTimeout)
		defer cancel()
		logger.Info("shutting down HTTP server")
		if err := server.Shutdown(shutdownCtx); err != nil {
			return fmt.Errorf("shutdown server: %w", err)
		}
		return nil
	case err := <-errCh:
		return fmt.Errorf("run server: %w", err)
	}
}

// routes assembles the HTTP handler tree: the MCP endpoint behind auth
// middleware, liveness/readiness probes, a favicon, and a catch-all
// banner, all wrapped in the observability middleware chain.
func routes(logger *slog.Logger, cfg *config.Config, db *store.DB, provider ai.Provider, keyring *auth.Keyring, activeProjects *session.ActiveProjects) http.Handler {
	mux := http.NewServeMux()

	toolSet := mcpserver.ToolSet{
		Capture:   tools.NewCaptureTool(db, provider, cfg.Capture, activeProjects, logger),
		Search:    tools.NewSearchTool(db, provider, cfg.Search, activeProjects),
		List:      tools.NewListTool(db, cfg.Search, activeProjects),
		Stats:     tools.NewStatsTool(db),
		Get:       tools.NewGetTool(db),
		Update:    tools.NewUpdateTool(db, provider, cfg.Capture, logger),
		Delete:    tools.NewDeleteTool(db),
		Archive:   tools.NewArchiveTool(db),
		Projects:  tools.NewProjectsTool(db, activeProjects),
		Context:   tools.NewContextTool(db, provider, cfg.Search, activeProjects),
		Recall:    tools.NewRecallTool(db, provider, cfg.Search, activeProjects),
		Summarize: tools.NewSummarizeTool(db, provider, cfg.Search, activeProjects),
		Links:     tools.NewLinksTool(db, provider, cfg.Search),
	}

	mcpHandler := mcpserver.New(cfg.MCP, toolSet)
	mux.Handle(cfg.MCP.Path, auth.Middleware(cfg.Auth, keyring, logger)(mcpHandler))

	mux.HandleFunc("/favicon.ico", serveFavicon)

	// Liveness probe: the process is up.
	mux.HandleFunc("/healthz", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
		_, _ = w.Write([]byte("ok"))
	})

	// Readiness probe: the database must be reachable.
	mux.HandleFunc("/readyz", func(w http.ResponseWriter, r *http.Request) {
		if err := db.Ready(r.Context()); err != nil {
			logger.Error("readiness check failed", slog.String("error", err.Error()))
			http.Error(w, "not ready", http.StatusServiceUnavailable)
			return
		}
		w.WriteHeader(http.StatusOK)
		_, _ = w.Write([]byte("ready"))
	})

	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
		_, _ = w.Write([]byte("amcs is running"))
	})

	return observability.Chain(
		mux,
		observability.RequestID(),
		observability.Recover(logger),
		observability.AccessLog(logger),
		observability.Timeout(cfg.Server.WriteTimeout),
	)
}