From 4526a267bf17587041dd9ec208798531ea43324c Mon Sep 17 00:00:00 2001 From: Claude Code Date: Wed, 8 Oct 2025 23:51:56 +1100 Subject: [PATCH] feat(whoosh): add /metrics + admin health alias; serve UI assets at root; ui: auth token control, spinner fix, delete repo/project, correct repo POST payload --- internal/server/server.go | 2461 +++++++++++++++++++++++++++---------- ui/index.html | 297 +---- ui/script.js | 1152 ++++++++--------- ui/styles.css | 634 ++++------ 4 files changed, 2559 insertions(+), 1985 deletions(-) diff --git a/internal/server/server.go b/internal/server/server.go index 8f5976c..1632b52 100644 --- a/internal/server/server.go +++ b/internal/server/server.go @@ -3,14 +3,17 @@ package server import ( "bytes" "context" + "database/sql" "encoding/json" "fmt" "io" "net/http" "os" "path/filepath" + "regexp" "strconv" "strings" + "sync" "time" "github.com/chorus-services/whoosh/internal/agents" @@ -32,6 +35,9 @@ import ( "github.com/go-chi/cors" "github.com/go-chi/render" "github.com/google/uuid" + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgconn" + "github.com/rs/zerolog" "github.com/rs/zerolog/log" "go.opentelemetry.io/otel/attribute" ) @@ -39,28 +45,46 @@ import ( // Global version variable set by main package var version = "development" +var ucxlIdentSanitizer = regexp.MustCompile(`[^A-Za-z0-9_.-]`) + +func sanitizeUCXLIdentifier(input string) string { + lowered := strings.ToLower(input) + return ucxlIdentSanitizer.ReplaceAllString(lowered, "-") +} + +func truncateString(input string, limit int) string { + if limit <= 0 || len(input) <= limit { + return input + } + if limit <= 3 { + return input[:limit] + } + return input[:limit-3] + "..." +} + // SetVersion sets the global version variable func SetVersion(v string) { version = v } type Server struct { - config *config.Config - db *database.DB - httpServer *http.Server - router chi.Router - giteaClient *gitea.Client - webhookHandler *gitea.WebhookHandler - authMiddleware *auth.Middleware - rateLimiter *auth.RateLimiter - p2pDiscovery *p2p.Discovery - agentRegistry *agents.Registry - backbeat *backbeat.Integration - teamComposer *composer.Service - councilComposer *council.CouncilComposer - taskService *tasks.Service - giteaIntegration *tasks.GiteaIntegration - repoMonitor *monitor.Monitor + config *config.Config + db *database.DB + httpServer *http.Server + router chi.Router + giteaClient *gitea.Client + webhookHandler *gitea.WebhookHandler + authMiddleware *auth.Middleware + rateLimiter *auth.RateLimiter + p2pDiscovery *p2p.Discovery + p2pBroadcaster *p2p.Broadcaster + agentRegistry *agents.Registry + backbeat *backbeat.Integration + teamComposer *composer.Service + councilComposer *council.CouncilComposer + taskService *tasks.Service + giteaIntegration *tasks.GiteaIntegration + repoMonitor *monitor.Monitor swarmManager *orchestrator.SwarmManager agentDeployer *orchestrator.AgentDeployer scalingController *orchestrator.ScalingController @@ -70,23 +94,29 @@ type Server struct { metricsCollector *orchestrator.ScalingMetricsCollector scalingAPI *orchestrator.ScalingAPI validator *validation.Validator + constraintMu sync.Mutex + rebroadcastMu sync.Mutex + activeBroadcasts map[uuid.UUID]context.CancelFunc + roleProfiles map[string]RoleProfile + startTime time.Time } func NewServer(cfg *config.Config, db *database.DB) (*Server, error) { // Initialize core services taskService := tasks.NewService(db.Pool) giteaIntegration := tasks.NewGiteaIntegration(taskService, gitea.NewClient(cfg.GITEA), nil) - + // Initialize P2P discovery and agent registry p2pDiscovery := p2p.NewDiscovery() + p2pBroadcaster := p2p.NewBroadcaster(p2pDiscovery) agentRegistry := agents.NewRegistry(db.Pool, p2pDiscovery) - + // Initialize team composer teamComposer := composer.NewService(db.Pool, nil) // Use default config - + // Initialize council composer for project kickoffs councilComposer := council.NewCouncilComposer(db.Pool) - + // Initialize Docker Swarm orchestrator services conditionally var swarmManager *orchestrator.SwarmManager var agentDeployer *orchestrator.AgentDeployer @@ -148,7 +178,7 @@ func NewServer(cfg *config.Config, db *database.DB) (*Server, error) { } else { log.Warn().Msg("🐳 Docker integration disabled - scaling system and council agent deployment unavailable") } - + // Initialize repository monitor with team composer, council composer, and agent deployer repoMonitor := monitor.NewMonitor(db.Pool, cfg.GITEA, teamComposer, councilComposer, agentDeployer) @@ -158,8 +188,9 @@ func NewServer(cfg *config.Config, db *database.DB) (*Server, error) { giteaClient: gitea.NewClient(cfg.GITEA), webhookHandler: gitea.NewWebhookHandler(cfg.GITEA.WebhookToken), authMiddleware: auth.NewMiddleware(cfg.Auth.JWTSecret, cfg.Auth.ServiceTokens), - rateLimiter: auth.NewRateLimiter(100, time.Minute), // 100 requests per minute per IP + rateLimiter: auth.NewRateLimiter(100, time.Minute), p2pDiscovery: p2pDiscovery, + p2pBroadcaster: p2pBroadcaster, agentRegistry: agentRegistry, teamComposer: teamComposer, councilComposer: councilComposer, @@ -175,7 +206,10 @@ func NewServer(cfg *config.Config, db *database.DB) (*Server, error) { metricsCollector: metricsCollector, scalingAPI: scalingAPI, validator: validation.NewValidator(), + roleProfiles: defaultRoleProfiles(), } + s.activeBroadcasts = make(map[uuid.UUID]context.CancelFunc) + s.startTime = time.Now() // Initialize BACKBEAT integration if enabled if cfg.BACKBEAT.Enabled { @@ -228,17 +262,35 @@ func (s *Server) setupRouter() { } func (s *Server) setupRoutes() { - // Static file serving for UI assets - uiDir := "./ui" - s.router.Get("/ui/*", s.staticFileHandler(uiDir)) - + // Static file serving for UI assets + uiDir := resolveUIDir() + log.Info().Str("ui_dir", uiDir).Msg("πŸ“¦ WHOOSH serving UI static files") + s.router.Handle("/ui/*", http.StripPrefix("/ui/", http.FileServer(http.Dir(uiDir)))) + + // Root-path static files to avoid '/ui' prefix in URLs + s.router.Get("/styles.css", func(w http.ResponseWriter, r *http.Request) { + http.ServeFile(w, r, filepath.Join(resolveUIDir(), "styles.css")) + }) + s.router.Get("/script.js", func(w http.ResponseWriter, r *http.Request) { + http.ServeFile(w, r, filepath.Join(resolveUIDir(), "script.js")) + }) + // Optional: serve assets at root as well + s.router.Handle("/assets/*", http.StripPrefix("/", http.FileServer(http.Dir(resolveUIDir())))) + // Root route - serve basic dashboard s.router.Get("/", s.dashboardHandler) - + // Health check endpoints s.router.Get("/health", s.healthHandler) s.router.Get("/health/ready", s.readinessHandler) - + // Metrics endpoint (Prometheus text format) + s.router.Get("/metrics", s.metricsHandler) + + // Back-compat alias for admin health details under /api + // Some UIs or external monitors may call /api/admin/health/details + // even though the canonical route is /admin/health/details + s.router.Get("/api/admin/health/details", s.healthDetailsHandler) + // Admin health endpoint with detailed information s.router.Get("/admin/health/details", s.healthDetailsHandler) @@ -263,16 +315,16 @@ func (s *Server) setupRoutes() { // Project management endpoints r.Route("/projects", func(r chi.Router) { r.Get("/", s.listProjectsHandler) - r.With(s.authMiddleware.AdminRequired).Post("/", s.createProjectHandler) - r.With(s.authMiddleware.AdminRequired).Delete("/{projectID}", s.deleteProjectHandler) - + r.Post("/", s.createProjectHandler) + r.Route("/{projectID}", func(r chi.Router) { r.Get("/", s.getProjectHandler) + r.Delete("/", s.deleteProjectHandler) r.Get("/tasks", s.listProjectTasksHandler) r.Get("/tasks/available", s.listAvailableTasksHandler) r.Get("/repository", s.getProjectRepositoryHandler) r.Post("/analyze", s.analyzeProjectHandler) - + r.Route("/tasks/{taskNumber}", func(r chi.Router) { r.Get("/", s.getProjectTaskHandler) r.Post("/claim", s.claimTaskHandler) @@ -294,7 +346,7 @@ func (s *Server) setupRoutes() { r.Post("/submit", s.slurpSubmitHandler) r.Get("/artifacts/{ucxlAddr}", s.slurpRetrieveHandler) }) - + // Repository monitoring endpoints r.Route("/repositories", func(r chi.Router) { r.Get("/", s.listRepositoriesHandler) @@ -306,15 +358,22 @@ func (s *Server) setupRoutes() { r.With(s.authMiddleware.AdminRequired).Post("/{repoID}/ensure-labels", s.ensureRepositoryLabelsHandler) r.Get("/{repoID}/logs", s.getRepositorySyncLogsHandler) }) - + // Council management endpoints r.Route("/councils", func(r chi.Router) { + r.Get("/", s.listCouncilsHandler) r.Get("/{councilID}", s.getCouncilHandler) - + r.Route("/{councilID}/artifacts", func(r chi.Router) { r.Get("/", s.getCouncilArtifactsHandler) r.With(s.authMiddleware.AdminRequired).Post("/", s.createCouncilArtifactHandler) }) + + // Agent role claiming endpoint + r.Post("/{councilID}/claims", s.handleCouncilRoleClaim) + + // Persona status acknowledgment from CHORUS agents + r.Post("/{councilID}/roles/{roleName}/personas", s.handleCouncilPersonaAck) }) // Scaling system endpoints @@ -358,12 +417,12 @@ func (s *Server) Start(ctx context.Context) error { if err := s.p2pDiscovery.Start(); err != nil { return fmt.Errorf("failed to start P2P discovery: %w", err) } - + // Start agent registry service if err := s.agentRegistry.Start(); err != nil { return fmt.Errorf("failed to start agent registry: %w", err) } - + // Start repository monitoring service if s.repoMonitor != nil { go func() { @@ -373,7 +432,7 @@ func (s *Server) Start(ctx context.Context) error { }() log.Info().Msg("πŸ” Repository monitoring service started") } - + log.Info(). Str("addr", s.httpServer.Addr). Msg("HTTP server starting") @@ -399,12 +458,12 @@ func (s *Server) Shutdown(ctx context.Context) error { if err := s.agentRegistry.Stop(); err != nil { log.Error().Err(err).Msg("Failed to stop agent registry service") } - + // Stop P2P discovery service if err := s.p2pDiscovery.Stop(); err != nil { log.Error().Err(err).Msg("Failed to stop P2P discovery service") } - + // Stop repository monitoring service if s.repoMonitor != nil { s.repoMonitor.Stop() @@ -434,12 +493,12 @@ func (s *Server) healthHandler(w http.ResponseWriter, r *http.Request) { "service": "whoosh", "version": "0.1.0-mvp", } - + // Include BACKBEAT health information if available if s.backbeat != nil { response["backbeat"] = s.backbeat.GetHealth() } - + render.JSON(w, r, response) } @@ -464,32 +523,65 @@ func (s *Server) readinessHandler(w http.ResponseWriter, r *http.Request) { }) } +// metricsHandler exposes a minimal Prometheus text format so the UI +// dashboard can render metrics and external scrapers can poll it. +func (s *Server) metricsHandler(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "text/plain; version=0.0.4; charset=utf-8") + + uptime := time.Since(s.startTime).Seconds() + + ctx, cancel := context.WithTimeout(r.Context(), 500*time.Millisecond) + defer cancel() + dbHealthy := 0 + if err := s.db.Health(ctx); err == nil { + dbHealthy = 1 + } + + var buf bytes.Buffer + // Service info + fmt.Fprintf(&buf, "# HELP whoosh_info Service information.\n") + fmt.Fprintf(&buf, "# TYPE whoosh_info gauge\n") + fmt.Fprintf(&buf, "whoosh_info{version=\"%s\"} 1\n", version) + + // Uptime + fmt.Fprintf(&buf, "# HELP whoosh_uptime_seconds Uptime of the WHOOSH server.\n") + fmt.Fprintf(&buf, "# TYPE whoosh_uptime_seconds counter\n") + fmt.Fprintf(&buf, "whoosh_uptime_seconds %.0f\n", uptime) + + // Database health + fmt.Fprintf(&buf, "# HELP whoosh_database_healthy Database health status (1=healthy,0=unhealthy).\n") + fmt.Fprintf(&buf, "# TYPE whoosh_database_healthy gauge\n") + fmt.Fprintf(&buf, "whoosh_database_healthy %d\n", dbHealthy) + + _, _ = w.Write(buf.Bytes()) +} + // healthDetailsHandler provides comprehensive system health information func (s *Server) healthDetailsHandler(w http.ResponseWriter, r *http.Request) { ctx, span := tracing.StartSpan(r.Context(), "health_check_details") defer span.End() - + ctx, cancel := context.WithTimeout(ctx, 10*time.Second) defer cancel() - - response := map[string]interface{}{ - "service": "whoosh", - "version": version, - "timestamp": time.Now().Unix(), - "uptime": time.Since(time.Now()).Seconds(), // This would need to be stored at startup - "status": "healthy", - "components": make(map[string]interface{}), - } - + + response := map[string]interface{}{ + "service": "whoosh", + "version": version, + "timestamp": time.Now().Unix(), + "uptime": time.Since(s.startTime).Seconds(), + "status": "healthy", + "components": make(map[string]interface{}), + } + overallHealthy := true components := make(map[string]interface{}) - + // Database Health Check dbHealth := map[string]interface{}{ "name": "database", "type": "postgresql", } - + if err := s.db.Health(ctx); err != nil { dbHealth["status"] = "unhealthy" dbHealth["error"] = err.Error() @@ -499,14 +591,14 @@ func (s *Server) healthDetailsHandler(w http.ResponseWriter, r *http.Request) { } else { dbHealth["status"] = "healthy" dbHealth["last_checked"] = time.Now().Unix() - + // Get database statistics var dbStats map[string]interface{} if stats := s.db.Pool.Stat(); stats != nil { dbStats = map[string]interface{}{ - "max_conns": stats.MaxConns(), - "acquired_conns": stats.AcquiredConns(), - "idle_conns": stats.IdleConns(), + "max_conns": stats.MaxConns(), + "acquired_conns": stats.AcquiredConns(), + "idle_conns": stats.IdleConns(), "constructing_conns": stats.ConstructingConns(), } } @@ -514,13 +606,13 @@ func (s *Server) healthDetailsHandler(w http.ResponseWriter, r *http.Request) { span.SetAttributes(attribute.Bool("health.database.healthy", true)) } components["database"] = dbHealth - + // Gitea Health Check giteaHealth := map[string]interface{}{ "name": "gitea", "type": "external_service", } - + if s.giteaClient != nil { if err := s.giteaClient.TestConnection(ctx); err != nil { giteaHealth["status"] = "unhealthy" @@ -540,13 +632,13 @@ func (s *Server) healthDetailsHandler(w http.ResponseWriter, r *http.Request) { } giteaHealth["last_checked"] = time.Now().Unix() components["gitea"] = giteaHealth - + // BackBeat Health Check backbeatHealth := map[string]interface{}{ "name": "backbeat", "type": "internal_service", } - + if s.backbeat != nil { bbHealth := s.backbeat.GetHealth() if connected, ok := bbHealth["connected"].(bool); ok && connected { @@ -566,13 +658,13 @@ func (s *Server) healthDetailsHandler(w http.ResponseWriter, r *http.Request) { } backbeatHealth["last_checked"] = time.Now().Unix() components["backbeat"] = backbeatHealth - + // Docker Swarm Health Check (if enabled) swarmHealth := map[string]interface{}{ "name": "docker_swarm", "type": "orchestration", } - + if s.config.Docker.Enabled { // Basic Docker connection check - actual swarm health would need Docker client swarmHealth["status"] = "unknown" @@ -583,13 +675,13 @@ func (s *Server) healthDetailsHandler(w http.ResponseWriter, r *http.Request) { } swarmHealth["last_checked"] = time.Now().Unix() components["docker_swarm"] = swarmHealth - + // Repository Monitor Health monitorHealth := map[string]interface{}{ "name": "repository_monitor", "type": "internal_service", } - + if s.repoMonitor != nil { // Get repository monitoring statistics query := `SELECT @@ -598,10 +690,10 @@ func (s *Server) healthDetailsHandler(w http.ResponseWriter, r *http.Request) { COUNT(*) FILTER (WHERE sync_status = 'error') as error_repos, COUNT(*) FILTER (WHERE monitor_issues = true) as monitored_repos FROM repositories` - + var totalRepos, activeRepos, errorRepos, monitoredRepos int err := s.db.Pool.QueryRow(ctx, query).Scan(&totalRepos, &activeRepos, &errorRepos, &monitoredRepos) - + if err != nil { monitorHealth["status"] = "unhealthy" monitorHealth["error"] = err.Error() @@ -622,7 +714,7 @@ func (s *Server) healthDetailsHandler(w http.ResponseWriter, r *http.Request) { } monitorHealth["last_checked"] = time.Now().Unix() components["repository_monitor"] = monitorHealth - + // Overall system status if !overallHealthy { response["status"] = "unhealthy" @@ -636,15 +728,15 @@ func (s *Server) healthDetailsHandler(w http.ResponseWriter, r *http.Request) { attribute.Bool("health.overall_healthy", true), ) } - + response["components"] = components response["healthy"] = overallHealthy - + // Set appropriate HTTP status if !overallHealthy { render.Status(r, http.StatusServiceUnavailable) } - + render.JSON(w, r, response) } @@ -653,22 +745,22 @@ func (s *Server) listTeamsHandler(w http.ResponseWriter, r *http.Request) { // Parse pagination parameters limitStr := r.URL.Query().Get("limit") offsetStr := r.URL.Query().Get("offset") - + limit := 20 // Default limit offset := 0 // Default offset - + if limitStr != "" { if l, err := strconv.Atoi(limitStr); err == nil && l > 0 && l <= 100 { limit = l } } - + if offsetStr != "" { if o, err := strconv.Atoi(offsetStr); err == nil && o >= 0 { offset = o } } - + // Get teams from database teams, total, err := s.teamComposer.ListTeams(r.Context(), limit, offset) if err != nil { @@ -677,7 +769,7 @@ func (s *Server) listTeamsHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "failed to retrieve teams"}) return } - + render.JSON(w, r, map[string]interface{}{ "teams": teams, "total": total, @@ -688,36 +780,36 @@ func (s *Server) listTeamsHandler(w http.ResponseWriter, r *http.Request) { func (s *Server) createTeamHandler(w http.ResponseWriter, r *http.Request) { var taskInput composer.TaskAnalysisInput - + if err := json.NewDecoder(r.Body).Decode(&taskInput); err != nil { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "invalid request body"}) return } - + // Validate required fields if taskInput.Title == "" { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "title is required"}) return } - + if taskInput.Description == "" { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "description is required"}) return } - + // Set defaults if not provided if taskInput.Priority == "" { taskInput.Priority = composer.PriorityMedium } - + log.Info(). Str("task_title", taskInput.Title). Str("priority", string(taskInput.Priority)). Msg("Starting team composition for new task") - + // Analyze task and compose team result, err := s.teamComposer.AnalyzeAndComposeTeam(r.Context(), &taskInput) if err != nil { @@ -726,7 +818,7 @@ func (s *Server) createTeamHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "team composition failed"}) return } - + // Create the team in database team, err := s.teamComposer.CreateTeam(r.Context(), result.TeamComposition, &taskInput) if err != nil { @@ -735,20 +827,20 @@ func (s *Server) createTeamHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "failed to create team"}) return } - + log.Info(). Str("team_id", team.ID.String()). Str("team_name", team.Name). Float64("confidence_score", result.TeamComposition.ConfidenceScore). Msg("Team created successfully") - + // Return both the team and the composition analysis response := map[string]interface{}{ "team": team, "composition_result": result, - "message": "Team created successfully", + "message": "Team created successfully", } - + render.Status(r, http.StatusCreated) render.JSON(w, r, response) } @@ -761,7 +853,7 @@ func (s *Server) getTeamHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "invalid team ID"}) return } - + team, assignments, err := s.teamComposer.GetTeam(r.Context(), teamID) if err != nil { if err.Error() == "team not found" { @@ -769,18 +861,18 @@ func (s *Server) getTeamHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "team not found"}) return } - + log.Error().Err(err).Str("team_id", teamIDStr).Msg("Failed to get team") render.Status(r, http.StatusInternalServerError) render.JSON(w, r, map[string]string{"error": "failed to retrieve team"}) return } - + response := map[string]interface{}{ "team": team, "assignments": assignments, } - + render.JSON(w, r, response) } @@ -792,39 +884,39 @@ func (s *Server) updateTeamStatusHandler(w http.ResponseWriter, r *http.Request) render.JSON(w, r, map[string]string{"error": "invalid team ID"}) return } - + var statusUpdate struct { Status string `json:"status"` Reason string `json:"reason,omitempty"` } - + if err := json.NewDecoder(r.Body).Decode(&statusUpdate); err != nil { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "invalid request body"}) return } - + // Validate status values validStatuses := map[string]bool{ - "forming": true, - "active": true, - "completed": true, - "disbanded": true, + "forming": true, + "active": true, + "completed": true, + "disbanded": true, } - + if !validStatuses[statusUpdate.Status] { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "invalid status. Valid values: forming, active, completed, disbanded"}) return } - + // Update team status in database updateQuery := `UPDATE teams SET status = $1, updated_at = $2 WHERE id = $3` - + if statusUpdate.Status == "completed" { updateQuery = `UPDATE teams SET status = $1, updated_at = $2, completed_at = $2 WHERE id = $3` } - + _, err = s.db.Pool.Exec(r.Context(), updateQuery, statusUpdate.Status, time.Now(), teamID) if err != nil { log.Error().Err(err). @@ -835,13 +927,13 @@ func (s *Server) updateTeamStatusHandler(w http.ResponseWriter, r *http.Request) render.JSON(w, r, map[string]string{"error": "failed to update team status"}) return } - + log.Info(). Str("team_id", teamIDStr). Str("status", statusUpdate.Status). Str("reason", statusUpdate.Reason). Msg("Team status updated") - + render.JSON(w, r, map[string]interface{}{ "team_id": teamIDStr, "status": statusUpdate.Status, @@ -851,36 +943,36 @@ func (s *Server) updateTeamStatusHandler(w http.ResponseWriter, r *http.Request) func (s *Server) analyzeTeamCompositionHandler(w http.ResponseWriter, r *http.Request) { var taskInput composer.TaskAnalysisInput - + if err := json.NewDecoder(r.Body).Decode(&taskInput); err != nil { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "invalid request body"}) return } - + // Validate required fields if taskInput.Title == "" { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "title is required"}) return } - + if taskInput.Description == "" { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "description is required"}) return } - + // Set defaults if not provided if taskInput.Priority == "" { taskInput.Priority = composer.PriorityMedium } - + log.Info(). Str("task_title", taskInput.Title). Str("priority", string(taskInput.Priority)). Msg("Analyzing team composition requirements") - + // Analyze task and compose team (without creating it) result, err := s.teamComposer.AnalyzeAndComposeTeam(r.Context(), &taskInput) if err != nil { @@ -889,13 +981,13 @@ func (s *Server) analyzeTeamCompositionHandler(w http.ResponseWriter, r *http.Re render.JSON(w, r, map[string]string{"error": "team composition analysis failed"}) return } - + log.Info(). Str("analysis_id", result.AnalysisID.String()). Float64("confidence_score", result.TeamComposition.ConfidenceScore). Int("recommended_team_size", result.TeamComposition.EstimatedSize). Msg("Team composition analysis completed") - + render.JSON(w, r, result) } @@ -906,34 +998,34 @@ func (s *Server) listTasksHandler(w http.ResponseWriter, r *http.Request) { repositoryParam := r.URL.Query().Get("repository") limitStr := r.URL.Query().Get("limit") offsetStr := r.URL.Query().Get("offset") - + // Build filter filter := &tasks.TaskFilter{} - + if statusParam != "" && statusParam != "all" { filter.Status = []tasks.TaskStatus{tasks.TaskStatus(statusParam)} } - + if priorityParam != "" { filter.Priority = []tasks.TaskPriority{tasks.TaskPriority(priorityParam)} } - + if repositoryParam != "" { filter.Repository = repositoryParam } - + if limitStr != "" { if limit, err := strconv.Atoi(limitStr); err == nil && limit > 0 && limit <= 100 { filter.Limit = limit } } - + if offsetStr != "" { if offset, err := strconv.Atoi(offsetStr); err == nil && offset >= 0 { filter.Offset = offset } } - + // Get tasks from database taskList, total, err := s.taskService.ListTasks(r.Context(), filter) if err != nil { @@ -942,7 +1034,7 @@ func (s *Server) listTasksHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "failed to retrieve tasks"}) return } - + render.JSON(w, r, map[string]interface{}{ "tasks": taskList, "total": total, @@ -960,60 +1052,60 @@ func (s *Server) ingestTaskHandler(w http.ResponseWriter, r *http.Request) { Labels []string `json:"labels,omitempty"` Source string `json:"source,omitempty"` // "manual", "gitea", "webhook" } - + if err := json.NewDecoder(r.Body).Decode(&taskData); err != nil { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "invalid request body"}) return } - + // Validate required fields if taskData.Title == "" { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "title is required"}) return } - + if taskData.Description == "" { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "description is required"}) return } - + // Set defaults if taskData.Priority == "" { taskData.Priority = "medium" } - + if taskData.Source == "" { taskData.Source = "manual" } - + // Create task ID taskID := uuid.New().String() - + log.Info(). Str("task_id", taskID). Str("title", taskData.Title). Str("repository", taskData.Repository). Str("source", taskData.Source). Msg("Ingesting new task") - + // For MVP, we'll create the task record and attempt team composition // In production, this would persist to a tasks table and queue for processing - + // Create task in database first createInput := &tasks.CreateTaskInput{ - ExternalID: taskID, // Use generated ID as external ID for manual tasks - ExternalURL: taskData.IssueURL, - SourceType: tasks.SourceType(taskData.Source), - Title: taskData.Title, - Description: taskData.Description, - Priority: tasks.TaskPriority(taskData.Priority), - Repository: taskData.Repository, - Labels: taskData.Labels, + ExternalID: taskID, // Use generated ID as external ID for manual tasks + ExternalURL: taskData.IssueURL, + SourceType: tasks.SourceType(taskData.Source), + Title: taskData.Title, + Description: taskData.Description, + Priority: tasks.TaskPriority(taskData.Priority), + Repository: taskData.Repository, + Labels: taskData.Labels, } - + createdTask, err := s.taskService.CreateTask(r.Context(), createInput) if err != nil { log.Error().Err(err).Str("task_id", taskID).Msg("Failed to create task") @@ -1021,7 +1113,7 @@ func (s *Server) ingestTaskHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "failed to create task"}) return } - + // Convert to TaskAnalysisInput for team composition taskInput := &composer.TaskAnalysisInput{ Title: createdTask.Title, @@ -1037,18 +1129,18 @@ func (s *Server) ingestTaskHandler(w http.ResponseWriter, r *http.Request) { "labels": createdTask.Labels, }, } - + // Start team composition analysis in background for complex tasks // For simple tasks, we can process synchronously - isComplex := len(taskData.Description) > 200 || - len(taskData.Labels) > 3 || - taskData.Priority == "high" || + isComplex := len(taskData.Description) > 200 || + len(taskData.Labels) > 3 || + taskData.Priority == "high" || taskData.Priority == "critical" - + if isComplex { // For complex tasks, start async team composition go s.processTaskAsync(taskID, taskInput) - + // Return immediate response render.Status(r, http.StatusAccepted) render.JSON(w, r, map[string]interface{}{ @@ -1065,7 +1157,7 @@ func (s *Server) ingestTaskHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "task analysis failed"}) return } - + // Create the team team, err := s.teamComposer.CreateTeam(r.Context(), result.TeamComposition, taskInput) if err != nil { @@ -1074,19 +1166,19 @@ func (s *Server) ingestTaskHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "team creation failed"}) return } - + log.Info(). Str("task_id", taskID). Str("team_id", team.ID.String()). Msg("Task ingested and team created") - + render.Status(r, http.StatusCreated) render.JSON(w, r, map[string]interface{}{ "task_id": taskID, "team": team, "composition_result": result, - "status": "completed", - "message": "Task ingested and team created successfully", + "status": "completed", + "message": "Task ingested and team created successfully", }) } } @@ -1099,7 +1191,7 @@ func (s *Server) getTaskHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "invalid task ID format"}) return } - + // Get task from database task, err := s.taskService.GetTask(r.Context(), taskID) if err != nil { @@ -1108,13 +1200,13 @@ func (s *Server) getTaskHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "task not found"}) return } - + log.Error().Err(err).Str("task_id", taskIDStr).Msg("Failed to get task") render.Status(r, http.StatusInternalServerError) render.JSON(w, r, map[string]string{"error": "failed to retrieve task"}) return } - + render.JSON(w, r, map[string]interface{}{ "task": task, }) @@ -1128,26 +1220,26 @@ func (s *Server) slurpSubmitHandler(w http.ResponseWriter, r *http.Request) { Content map[string]interface{} `json:"content"` Metadata map[string]interface{} `json:"metadata,omitempty"` } - + if err := json.NewDecoder(r.Body).Decode(&submission); err != nil { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "invalid request body"}) return } - + // Validate required fields if submission.TeamID == "" || submission.ArtifactType == "" { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "team_id and artifact_type are required"}) return } - + // Generate UCXL address for the submission - ucxlAddr := fmt.Sprintf("ucxl://%s/%s/%d", - submission.TeamID, - submission.ArtifactType, + ucxlAddr := fmt.Sprintf("ucxl://%s/%s/%d", + submission.TeamID, + submission.ArtifactType, time.Now().Unix()) - + // For MVP, we'll store basic metadata in the database // In production, this would proxy to actual SLURP service teamUUID, err := uuid.Parse(submission.TeamID) @@ -1156,20 +1248,20 @@ func (s *Server) slurpSubmitHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "invalid team_id format"}) return } - + // Store submission record submissionID := uuid.New() metadataJSON, _ := json.Marshal(submission.Metadata) - + insertQuery := ` INSERT INTO slurp_submissions (id, team_id, ucxl_address, artifact_type, metadata, submitted_at, status) VALUES ($1, $2, $3, $4, $5, $6, $7) ` - + _, err = s.db.Pool.Exec(r.Context(), insertQuery, submissionID, teamUUID, ucxlAddr, submission.ArtifactType, metadataJSON, time.Now(), "submitted") - + if err != nil { log.Error().Err(err). Str("team_id", submission.TeamID). @@ -1179,13 +1271,13 @@ func (s *Server) slurpSubmitHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "failed to store submission"}) return } - + log.Info(). Str("team_id", submission.TeamID). Str("artifact_type", submission.ArtifactType). Str("ucxl_address", ucxlAddr). Msg("SLURP submission stored") - + render.Status(r, http.StatusCreated) render.JSON(w, r, map[string]interface{}{ "submission_id": submissionID, @@ -1202,30 +1294,30 @@ func (s *Server) slurpRetrieveHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "ucxl_address query parameter is required"}) return } - + log.Info(). Str("ucxl_address", ucxlAddress). Msg("Retrieving SLURP submission") - + // Query the submission from database query := ` SELECT id, team_id, ucxl_address, artifact_type, metadata, submitted_at, status FROM slurp_submissions WHERE ucxl_address = $1 ` - + row := s.db.Pool.QueryRow(r.Context(), query, ucxlAddress) - + var ( - id uuid.UUID - teamID uuid.UUID + id uuid.UUID + teamID uuid.UUID retrievedAddr string - artifactType string - metadataJSON []byte - submittedAt time.Time - status string + artifactType string + metadataJSON []byte + submittedAt time.Time + status string ) - + err := row.Scan(&id, &teamID, &retrievedAddr, &artifactType, &metadataJSON, &submittedAt, &status) if err != nil { if err.Error() == "no rows in result set" { @@ -1233,7 +1325,7 @@ func (s *Server) slurpRetrieveHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "SLURP submission not found"}) return } - + log.Error().Err(err). Str("ucxl_address", ucxlAddress). Msg("Failed to retrieve SLURP submission") @@ -1241,13 +1333,13 @@ func (s *Server) slurpRetrieveHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "failed to retrieve submission"}) return } - + // Parse metadata var metadata map[string]interface{} if len(metadataJSON) > 0 { json.Unmarshal(metadataJSON, &metadata) } - + submission := map[string]interface{}{ "id": id, "team_id": teamID, @@ -1257,7 +1349,7 @@ func (s *Server) slurpRetrieveHandler(w http.ResponseWriter, r *http.Request) { "submitted_at": submittedAt.Format(time.RFC3339), "status": status, } - + // For MVP, we return the metadata. In production, this would // proxy to SLURP service to retrieve actual artifact content render.JSON(w, r, map[string]interface{}{ @@ -1270,48 +1362,48 @@ func (s *Server) slurpRetrieveHandler(w http.ResponseWriter, r *http.Request) { func (s *Server) listProjectTasksHandler(w http.ResponseWriter, r *http.Request) { projectID := chi.URLParam(r, "projectID") - + log.Info(). Str("project_id", projectID). Msg("Listing tasks for project") - + // For MVP, return mock tasks associated with the project // In production, this would query actual tasks from database tasks := []map[string]interface{}{ { - "id": "task-001", - "project_id": projectID, - "title": "Setup project infrastructure", - "description": "Initialize Docker, CI/CD, and database setup", - "status": "completed", - "priority": "high", + "id": "task-001", + "project_id": projectID, + "title": "Setup project infrastructure", + "description": "Initialize Docker, CI/CD, and database setup", + "status": "completed", + "priority": "high", "assigned_team": nil, - "created_at": time.Now().Add(-48 * time.Hour).Format(time.RFC3339), - "completed_at": time.Now().Add(-12 * time.Hour).Format(time.RFC3339), + "created_at": time.Now().Add(-48 * time.Hour).Format(time.RFC3339), + "completed_at": time.Now().Add(-12 * time.Hour).Format(time.RFC3339), }, { - "id": "task-002", - "project_id": projectID, - "title": "Implement authentication system", - "description": "JWT-based authentication with user management", - "status": "active", - "priority": "high", + "id": "task-002", + "project_id": projectID, + "title": "Implement authentication system", + "description": "JWT-based authentication with user management", + "status": "active", + "priority": "high", "assigned_team": "team-001", - "created_at": time.Now().Add(-24 * time.Hour).Format(time.RFC3339), - "updated_at": time.Now().Add(-2 * time.Hour).Format(time.RFC3339), + "created_at": time.Now().Add(-24 * time.Hour).Format(time.RFC3339), + "updated_at": time.Now().Add(-2 * time.Hour).Format(time.RFC3339), }, { - "id": "task-003", - "project_id": projectID, - "title": "Create API documentation", - "description": "OpenAPI/Swagger documentation for all endpoints", - "status": "queued", - "priority": "medium", + "id": "task-003", + "project_id": projectID, + "title": "Create API documentation", + "description": "OpenAPI/Swagger documentation for all endpoints", + "status": "queued", + "priority": "medium", "assigned_team": nil, - "created_at": time.Now().Add(-6 * time.Hour).Format(time.RFC3339), + "created_at": time.Now().Add(-6 * time.Hour).Format(time.RFC3339), }, } - + render.JSON(w, r, map[string]interface{}{ "project_id": projectID, "tasks": tasks, @@ -1323,54 +1415,54 @@ func (s *Server) listProjectTasksHandler(w http.ResponseWriter, r *http.Request) func (s *Server) listAvailableTasksHandler(w http.ResponseWriter, r *http.Request) { // Get query parameters for filtering skillFilter := r.URL.Query().Get("skills") - priorityFilter := r.URL.Query().Get("priority") - + priorityFilter := r.URL.Query().Get("priority") + log.Info(). Str("skill_filter", skillFilter). Str("priority_filter", priorityFilter). Msg("Listing available tasks") - + // For MVP, return mock available tasks that agents can claim // In production, this would query unassigned tasks from database availableTasks := []map[string]interface{}{ { - "id": "task-004", - "title": "Fix memory leak in user service", - "description": "Investigate and fix memory leak causing high memory usage", - "status": "available", - "priority": "high", + "id": "task-004", + "title": "Fix memory leak in user service", + "description": "Investigate and fix memory leak causing high memory usage", + "status": "available", + "priority": "high", "skills_required": []string{"go", "debugging", "performance"}, "estimated_hours": 8, - "repository": "example/user-service", - "created_at": time.Now().Add(-3 * time.Hour).Format(time.RFC3339), + "repository": "example/user-service", + "created_at": time.Now().Add(-3 * time.Hour).Format(time.RFC3339), }, { - "id": "task-005", - "title": "Add rate limiting to API", - "description": "Implement rate limiting middleware for API endpoints", - "status": "available", - "priority": "medium", + "id": "task-005", + "title": "Add rate limiting to API", + "description": "Implement rate limiting middleware for API endpoints", + "status": "available", + "priority": "medium", "skills_required": []string{"go", "middleware", "api"}, "estimated_hours": 4, - "repository": "example/api-gateway", - "created_at": time.Now().Add(-1 * time.Hour).Format(time.RFC3339), + "repository": "example/api-gateway", + "created_at": time.Now().Add(-1 * time.Hour).Format(time.RFC3339), }, { - "id": "task-006", - "title": "Update React components", - "description": "Migrate legacy class components to functional components", - "status": "available", - "priority": "low", + "id": "task-006", + "title": "Update React components", + "description": "Migrate legacy class components to functional components", + "status": "available", + "priority": "low", "skills_required": []string{"react", "javascript", "frontend"}, "estimated_hours": 12, - "repository": "example/web-ui", - "created_at": time.Now().Add(-30 * time.Minute).Format(time.RFC3339), + "repository": "example/web-ui", + "created_at": time.Now().Add(-30 * time.Minute).Format(time.RFC3339), }, } - + // Apply filtering if specified filteredTasks := availableTasks - + if priorityFilter != "" { filtered := []map[string]interface{}{} for _, task := range availableTasks { @@ -1380,10 +1472,10 @@ func (s *Server) listAvailableTasksHandler(w http.ResponseWriter, r *http.Reques } filteredTasks = filtered } - + render.JSON(w, r, map[string]interface{}{ "available_tasks": filteredTasks, - "total": len(filteredTasks), + "total": len(filteredTasks), "filters": map[string]string{ "skills": skillFilter, "priority": priorityFilter, @@ -1410,25 +1502,25 @@ func (s *Server) claimTaskHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "invalid task ID format"}) return } - + var claimData struct { - TeamID string `json:"team_id"` + TeamID string `json:"team_id"` AgentID string `json:"agent_id,omitempty"` Reason string `json:"reason,omitempty"` } - + if err := json.NewDecoder(r.Body).Decode(&claimData); err != nil { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "invalid request body"}) return } - + if claimData.TeamID == "" { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "team_id is required"}) return } - + // Validate team exists teamUUID, err := uuid.Parse(claimData.TeamID) if err != nil { @@ -1436,7 +1528,7 @@ func (s *Server) claimTaskHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "invalid team_id format"}) return } - + // Check if team exists _, _, err = s.teamComposer.GetTeam(r.Context(), teamUUID) if err != nil { @@ -1445,13 +1537,13 @@ func (s *Server) claimTaskHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "team not found"}) return } - + log.Error().Err(err).Str("team_id", claimData.TeamID).Msg("Failed to validate team") render.Status(r, http.StatusInternalServerError) render.JSON(w, r, map[string]string{"error": "failed to validate team"}) return } - + // Parse agent ID if provided var agentUUID *uuid.UUID if claimData.AgentID != "" { @@ -1463,7 +1555,7 @@ func (s *Server) claimTaskHandler(w http.ResponseWriter, r *http.Request) { } agentUUID = &agentID } - + // Assign task to team/agent assignment := &tasks.TaskAssignment{ TaskID: taskID, @@ -1471,7 +1563,7 @@ func (s *Server) claimTaskHandler(w http.ResponseWriter, r *http.Request) { AgentID: agentUUID, Reason: claimData.Reason, } - + err = s.taskService.AssignTask(r.Context(), assignment) if err != nil { log.Error().Err(err).Str("task_id", taskIDStr).Msg("Failed to assign task") @@ -1479,13 +1571,13 @@ func (s *Server) claimTaskHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "failed to assign task"}) return } - + log.Info(). Str("task_id", taskIDStr). Str("team_id", claimData.TeamID). Str("agent_id", claimData.AgentID). Msg("Task assigned to team") - + render.JSON(w, r, map[string]interface{}{ "task_id": taskIDStr, "team_id": claimData.TeamID, @@ -1509,36 +1601,36 @@ func (s *Server) completeTaskHandler(w http.ResponseWriter, r *http.Request) { func (s *Server) listAgentsHandler(w http.ResponseWriter, r *http.Request) { // Get discovered CHORUS agents from P2P discovery discoveredAgents := s.p2pDiscovery.GetAgents() - + // Convert to API format agents := make([]map[string]interface{}, 0, len(discoveredAgents)) onlineCount := 0 idleCount := 0 offlineCount := 0 - + for _, agent := range discoveredAgents { agentData := map[string]interface{}{ - "id": agent.ID, - "name": agent.Name, - "status": agent.Status, - "capabilities": agent.Capabilities, - "model": agent.Model, - "endpoint": agent.Endpoint, - "last_seen": agent.LastSeen.Format(time.RFC3339), + "id": agent.ID, + "name": agent.Name, + "status": agent.Status, + "capabilities": agent.Capabilities, + "model": agent.Model, + "endpoint": agent.Endpoint, + "last_seen": agent.LastSeen.Format(time.RFC3339), "tasks_completed": agent.TasksCompleted, - "p2p_addr": agent.P2PAddr, - "cluster_id": agent.ClusterID, + "p2p_addr": agent.P2PAddr, + "cluster_id": agent.ClusterID, } - + // Add current team if present if agent.CurrentTeam != "" { agentData["current_team"] = agent.CurrentTeam } else { agentData["current_team"] = nil } - + agents = append(agents, agentData) - + // Count status switch agent.Status { case "online": @@ -1567,20 +1659,20 @@ func (s *Server) registerAgentHandler(w http.ResponseWriter, r *http.Request) { EndpointURL string `json:"endpoint_url"` Capabilities map[string]interface{} `json:"capabilities"` } - + if err := json.NewDecoder(r.Body).Decode(&agentData); err != nil { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "invalid request body"}) return } - + // Validate required fields if agentData.Name == "" || agentData.EndpointURL == "" { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "name and endpoint_url are required"}) return } - + // Create agent record agent := &composer.Agent{ ID: uuid.New(), @@ -1593,39 +1685,39 @@ func (s *Server) registerAgentHandler(w http.ResponseWriter, r *http.Request) { CreatedAt: time.Now(), UpdatedAt: time.Now(), } - + // Initialize empty capabilities if none provided if agent.Capabilities == nil { agent.Capabilities = make(map[string]interface{}) } - + // Insert into database capabilitiesJSON, _ := json.Marshal(agent.Capabilities) metricsJSON, _ := json.Marshal(agent.PerformanceMetrics) - + query := ` INSERT INTO agents (id, name, endpoint_url, capabilities, status, last_seen, performance_metrics, created_at, updated_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) ` - + _, err := s.db.Pool.Exec(r.Context(), query, agent.ID, agent.Name, agent.EndpointURL, capabilitiesJSON, agent.Status, agent.LastSeen, metricsJSON, agent.CreatedAt, agent.UpdatedAt) - + if err != nil { log.Error().Err(err).Str("agent_name", agent.Name).Msg("Failed to register agent") render.Status(r, http.StatusInternalServerError) render.JSON(w, r, map[string]string{"error": "failed to register agent"}) return } - + log.Info(). Str("agent_id", agent.ID.String()). Str("agent_name", agent.Name). Str("endpoint", agent.EndpointURL). Msg("Agent registered successfully") - + render.Status(r, http.StatusCreated) render.JSON(w, r, map[string]interface{}{ "agent": agent, @@ -1641,19 +1733,19 @@ func (s *Server) updateAgentStatusHandler(w http.ResponseWriter, r *http.Request render.JSON(w, r, map[string]string{"error": "invalid agent ID"}) return } - + var statusUpdate struct { Status string `json:"status"` PerformanceMetrics map[string]interface{} `json:"performance_metrics,omitempty"` Reason string `json:"reason,omitempty"` } - + if err := json.NewDecoder(r.Body).Decode(&statusUpdate); err != nil { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "invalid request body"}) return } - + // Validate status values validStatuses := map[string]bool{ "available": true, @@ -1661,20 +1753,20 @@ func (s *Server) updateAgentStatusHandler(w http.ResponseWriter, r *http.Request "idle": true, "offline": true, } - + if !validStatuses[statusUpdate.Status] { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "invalid status. Valid values: available, busy, idle, offline"}) return } - + // Update agent status and last_seen timestamp updateQuery := ` UPDATE agents SET status = $1, last_seen = $2, updated_at = $2 WHERE id = $3 ` - + _, err = s.db.Pool.Exec(r.Context(), updateQuery, statusUpdate.Status, time.Now(), agentID) if err != nil { log.Error().Err(err). @@ -1685,27 +1777,27 @@ func (s *Server) updateAgentStatusHandler(w http.ResponseWriter, r *http.Request render.JSON(w, r, map[string]string{"error": "failed to update agent status"}) return } - + // Update performance metrics if provided if statusUpdate.PerformanceMetrics != nil { metricsJSON, _ := json.Marshal(statusUpdate.PerformanceMetrics) - _, err = s.db.Pool.Exec(r.Context(), + _, err = s.db.Pool.Exec(r.Context(), `UPDATE agents SET performance_metrics = $1 WHERE id = $2`, metricsJSON, agentID) - + if err != nil { log.Warn().Err(err). Str("agent_id", agentIDStr). Msg("Failed to update agent performance metrics") } } - + log.Info(). Str("agent_id", agentIDStr). Str("status", statusUpdate.Status). Str("reason", statusUpdate.Reason). Msg("Agent status updated") - + render.JSON(w, r, map[string]interface{}{ "agent_id": agentIDStr, "status": statusUpdate.Status, @@ -1716,29 +1808,57 @@ func (s *Server) updateAgentStatusHandler(w http.ResponseWriter, r *http.Request // Project Management Handlers func (s *Server) listProjectsHandler(w http.ResponseWriter, r *http.Request) { - // For MVP, return hardcoded projects list - // In full implementation, this would query database - projects := []map[string]interface{}{ - { - "id": "whoosh-001", - "name": "WHOOSH", - "repo_url": "https://gitea.chorus.services/tony/WHOOSH", - "description": "Autonomous AI Development Teams Architecture", - "tech_stack": []string{"Go", "Docker", "PostgreSQL"}, - "status": "active", - "created_at": "2025-09-04T00:00:00Z", - "team_size": 3, - }, - { - "id": "chorus-001", - "name": "CHORUS", - "repo_url": "https://gitea.chorus.services/tony/CHORUS", - "description": "AI Agent P2P Coordination System", - "tech_stack": []string{"Go", "P2P", "LibP2P"}, - "status": "active", - "created_at": "2025-09-03T00:00:00Z", - "team_size": 2, - }, + ctx := r.Context() + + // Query councils table (which stores project data) + rows, err := s.db.Pool.Query(ctx, ` + SELECT id, project_name, repository, project_brief, status, created_at, metadata + FROM councils + ORDER BY created_at DESC + `) + if err != nil { + log.Error().Err(err).Msg("Failed to query councils") + render.Status(r, http.StatusInternalServerError) + render.JSON(w, r, map[string]string{"error": "failed to fetch projects"}) + return + } + defer rows.Close() + + projects := []map[string]interface{}{} + for rows.Next() { + var id string + var name, repo, description, status string + var createdAt time.Time + var metadata []byte + + if err := rows.Scan(&id, &name, &repo, &description, &status, &createdAt, &metadata); err != nil { + log.Error().Err(err).Msg("Failed to scan council row") + continue + } + + project := map[string]interface{}{ + "id": id, + "name": name, + "repo_url": repo, + "description": description, + "status": status, + "created_at": createdAt.Format(time.RFC3339), + } + + // Parse metadata if available + if metadata != nil { + var meta map[string]interface{} + if err := json.Unmarshal(metadata, &meta); err == nil { + if lang, ok := meta["language"].(string); ok { + project["language"] = lang + } + if owner, ok := meta["owner"].(string); ok { + project["owner"] = owner + } + } + } + + projects = append(projects, project) } render.JSON(w, r, map[string]interface{}{ @@ -1759,56 +1879,168 @@ func (s *Server) listProjectsHandler(w http.ResponseWriter, r *http.Request) { // returning in-memory data. The database integration is prepared in the docker-compose // but not yet implemented in the handlers. func (s *Server) createProjectHandler(w http.ResponseWriter, r *http.Request) { - // Parse and validate request using secure validation - var reqData map[string]interface{} - + ctx := r.Context() + + // Parse request - now only requires repository_url + var reqData struct { + RepositoryURL string `json:"repository_url"` + } + if err := s.validator.DecodeAndValidateJSON(r, &reqData); err != nil { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "invalid JSON payload"}) return } - // Validate request using comprehensive validation - if errors := validation.ValidateProjectRequest(reqData); !s.validator.ValidateAndRespond(w, r, errors) { + // Validate repository URL + if reqData.RepositoryURL == "" { + render.Status(r, http.StatusBadRequest) + render.JSON(w, r, map[string]string{"error": "repository_url is required"}) return } - // Extract validated fields - name := validation.SanitizeString(reqData["name"].(string)) - repoURL := validation.SanitizeString(reqData["repo_url"].(string)) - description := "" - if desc, exists := reqData["description"]; exists && desc != nil { - description = validation.SanitizeString(desc.(string)) + // Parse repository URL to extract owner and repo name + // Expected format: https://gitea.chorus.services/owner/repo + repoURL := validation.SanitizeString(reqData.RepositoryURL) + parts := strings.Split(strings.TrimSuffix(repoURL, ".git"), "/") + if len(parts) < 2 { + render.Status(r, http.StatusBadRequest) + render.JSON(w, r, map[string]string{"error": "invalid repository URL format"}) + return } - // Generate unique project ID using Unix timestamp. In production, this would be - // a proper UUID or database auto-increment, but for MVP simplicity, timestamp-based - // IDs are sufficient and provide natural ordering. - projectID := fmt.Sprintf("proj-%d", time.Now().Unix()) - - // Project data structure matches the expected format for the frontend UI. - // Status "created" indicates the project is registered but not yet analyzed. - // This will be updated to "analyzing" -> "completed" by the N8N workflow. - project := map[string]interface{}{ - "id": projectID, - "name": name, - "repo_url": repoURL, - "description": description, - "status": "created", - "created_at": time.Now().Format(time.RFC3339), - "team_size": 0, // Will be populated after N8N analysis - } + repoName := parts[len(parts)-1] + owner := parts[len(parts)-2] - // Structured logging with zerolog provides excellent performance and - // searchability in production environments. Include key identifiers - // for debugging and audit trails. + // Fetch repository metadata from GITEA log.Info(). - Str("project_id", projectID). - Str("repo_url", repoURL). - Msg("Created new project") + Str("owner", owner). + Str("repo", repoName). + Msg("Fetching repository metadata from GITEA") + + repo, err := s.giteaClient.GetRepository(ctx, owner, repoName) + if err != nil { + log.Error(). + Err(err). + Str("owner", owner). + Str("repo", repoName). + Msg("Failed to fetch repository from GITEA") + render.Status(r, http.StatusBadRequest) + render.JSON(w, r, map[string]string{"error": fmt.Sprintf("failed to fetch repository: %v", err)}) + return + } + + log.Info(). + Str("repo_url", repoURL). + Str("title", repo.Name). + Str("description", repo.Description). + Msg("Creating new council from GITEA repository") + + // Prepare metadata with owner and language info + metadata := map[string]interface{}{ + "owner": repo.Owner.Login, + "language": repo.Language, + } + + // Create council formation request + formationRequest := &council.CouncilFormationRequest{ + ProjectName: repo.Name, + Repository: repoURL, + ProjectBrief: repo.Description, + Metadata: metadata, + } + + log.Info(). + Str("project", repo.Name). + Msg("🎭 Triggering council formation workflow") + + // Form council (this creates the council record and agents) + composition, err := s.councilComposer.FormCouncil(ctx, formationRequest) + if err != nil { + log.Error(). + Err(err). + Str("project", repo.Name). + Msg("Failed to form council") + render.Status(r, http.StatusInternalServerError) + render.JSON(w, r, map[string]string{"error": fmt.Sprintf("failed to form council: %v", err)}) + return + } + + log.Info(). + Str("council_id", composition.CouncilID.String()). + Str("project", repo.Name). + Msg("βœ… Council formation completed successfully") + + // Broadcast council opportunity to CHORUS agents via P2P + go func() { + broadcastCtx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + + // Prepare council roles for broadcasting + coreRoles := make([]p2p.CouncilRole, len(composition.CoreAgents)) + for i, agent := range composition.CoreAgents { + coreRoles[i] = p2p.CouncilRole{ + RoleName: agent.RoleName, + AgentName: agent.AgentName, + Required: agent.Required, + Description: fmt.Sprintf("Core council role: %s", agent.AgentName), + } + } + + optionalRoles := make([]p2p.CouncilRole, len(composition.OptionalAgents)) + for i, agent := range composition.OptionalAgents { + optionalRoles[i] = p2p.CouncilRole{ + RoleName: agent.RoleName, + AgentName: agent.AgentName, + Required: agent.Required, + Description: fmt.Sprintf("Optional council role: %s", agent.AgentName), + } + } + + // Create opportunity broadcast + opportunity := &p2p.CouncilOpportunity{ + CouncilID: composition.CouncilID, + ProjectName: repo.Name, + Repository: repoURL, + ProjectBrief: repo.Description, + CoreRoles: coreRoles, + OptionalRoles: optionalRoles, + UCXLAddress: fmt.Sprintf("ucxl://team:council@project:%s:council/councils/%s", sanitizeUCXLIdentifier(repo.Name), composition.CouncilID.String()), + FormationDeadline: time.Now().Add(24 * time.Hour), // 24 hours to form council + CreatedAt: composition.CreatedAt, + Metadata: metadata, + } + + // Broadcast to all CHORUS agents + err := s.p2pBroadcaster.BroadcastCouncilOpportunity(broadcastCtx, opportunity) + if err != nil { + log.Error(). + Err(err). + Str("council_id", composition.CouncilID.String()). + Msg("Failed to broadcast council opportunity to CHORUS agents") + } else { + log.Info(). + Str("council_id", composition.CouncilID.String()). + Int("core_roles", len(coreRoles)). + Int("optional_roles", len(optionalRoles)). + Msg("πŸ“‘ Successfully broadcast council opportunity to CHORUS agents") + + s.startCouncilRebroadcastMonitor(opportunity) + } + }() + + // Create response project object + project := map[string]interface{}{ + "id": composition.CouncilID.String(), + "name": repo.Name, + "repo_url": repoURL, + "description": repo.Description, + "owner": repo.Owner.Login, + "language": repo.Language, + "status": "forming", + "created_at": time.Now().Format(time.RFC3339), + } - // Return 201 Created with the project data. The frontend will use this - // response to update the UI and potentially trigger immediate analysis. render.Status(r, http.StatusCreated) render.JSON(w, r, project) } @@ -1821,42 +2053,114 @@ func (s *Server) createProjectHandler(w http.ResponseWriter, r *http.Request) { // URL path rather than query parameters, following REST conventions where the // resource identifier is part of the path structure. func (s *Server) deleteProjectHandler(w http.ResponseWriter, r *http.Request) { - // Extract project ID from URL path parameter. Chi router handles the parsing - // and validation of the URL structure, so we can safely assume this exists - // if the route matched. + ctx := r.Context() + + // Extract project ID from URL path parameter projectID := chi.URLParam(r, "projectID") - - // Log the deletion for audit purposes. In a production system, you'd want - // to track who deleted what and when for compliance and debugging. + + // Parse UUID + councilID, err := uuid.Parse(projectID) + if err != nil { + render.Status(r, http.StatusBadRequest) + render.JSON(w, r, map[string]string{"error": "invalid project ID"}) + return + } + + // Delete from councils table + result, err := s.db.Pool.Exec(ctx, ` + DELETE FROM councils WHERE id = $1 + `, councilID) + + if err != nil { + log.Error(). + Err(err). + Str("council_id", councilID.String()). + Msg("Failed to delete council from database") + render.Status(r, http.StatusInternalServerError) + render.JSON(w, r, map[string]string{"error": "failed to delete project"}) + return + } + + // Check if council was found and deleted + if result.RowsAffected() == 0 { + render.Status(r, http.StatusNotFound) + render.JSON(w, r, map[string]string{"error": "project not found"}) + return + } + log.Info(). - Str("project_id", projectID). - Msg("Deleted project") - + Str("council_id", councilID.String()). + Msg("Deleted council") + render.JSON(w, r, map[string]string{"message": "project deleted"}) } // getProjectHandler handles GET /api/projects/{projectID} requests to retrieve // detailed information about a specific project, including its analysis results // and team formation recommendations from the N8N workflow. -// -// Implementation decision: We return mock data for now since database persistence -// isn't implemented yet. In production, this would query PostgreSQL for the -// actual project record and its associated analysis results. func (s *Server) getProjectHandler(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() projectID := chi.URLParam(r, "projectID") - - // TODO: Replace with database query - this mock data demonstrates the expected - // response structure that the frontend UI will consume. The tech_stack and - // team_size fields would be populated by N8N workflow analysis results. + + // Parse UUID + councilID, err := uuid.Parse(projectID) + if err != nil { + render.Status(r, http.StatusBadRequest) + render.JSON(w, r, map[string]string{"error": "invalid project ID"}) + return + } + + // Query councils table + var id string + var name, repo, description, status string + var createdAt time.Time + var metadata []byte + + err = s.db.Pool.QueryRow(ctx, ` + SELECT id, project_name, repository, project_brief, status, created_at, metadata + FROM councils + WHERE id = $1 + `, councilID).Scan(&id, &name, &repo, &description, &status, &createdAt, &metadata) + + if err != nil { + if err == pgx.ErrNoRows { + render.Status(r, http.StatusNotFound) + render.JSON(w, r, map[string]string{"error": "project not found"}) + return + } + log.Error().Err(err).Str("council_id", councilID.String()).Msg("Failed to query council") + render.Status(r, http.StatusInternalServerError) + render.JSON(w, r, map[string]string{"error": "internal server error"}) + return + } + + // Build project response project := map[string]interface{}{ - "id": projectID, - "name": "Sample Project", - "repo_url": "https://gitea.chorus.services/tony/" + projectID, - "description": "Sample project description", - "tech_stack": []string{"Go", "JavaScript"}, // From N8N analysis - "status": "active", - "created_at": "2025-09-04T00:00:00Z", - "team_size": 2, // From N8N team formation recommendations + "id": id, + "name": name, + "repo_url": repo, + "description": description, + "status": status, + "created_at": createdAt.Format(time.RFC3339), + } + + // Parse metadata for additional fields + if metadata != nil { + var meta map[string]interface{} + if err := json.Unmarshal(metadata, &meta); err == nil { + if lang, ok := meta["language"].(string); ok { + project["language"] = lang + } + if owner, ok := meta["owner"].(string); ok { + project["owner"] = owner + } + if techStack, ok := meta["tech_stack"].([]interface{}); ok { + project["tech_stack"] = techStack + } + if teamSize, ok := meta["team_size"].(float64); ok { + project["team_size"] = int(teamSize) + } + } } render.JSON(w, r, project) @@ -1876,7 +2180,7 @@ func (s *Server) getProjectHandler(w http.ResponseWriter, r *http.Request) { // WHOOSH UI β†’ N8N workflow β†’ LLM analysis β†’ team formation recommendations func (s *Server) analyzeProjectHandler(w http.ResponseWriter, r *http.Request) { projectID := chi.URLParam(r, "projectID") - + // Project data structure for N8N payload. In production, this would be fetched // from the database using the projectID, but for MVP we allow it to be provided // in the request body or fall back to predictable mock data. @@ -1884,7 +2188,7 @@ func (s *Server) analyzeProjectHandler(w http.ResponseWriter, r *http.Request) { RepoURL string `json:"repo_url"` Name string `json:"name"` } - + // Handle both scenarios: explicit project data in request body (for testing) // and implicit data fetching (for production UI). This flexibility makes the // API easier to test manually while supporting the intended UI workflow. @@ -1928,26 +2232,26 @@ func (s *Server) analyzeProjectHandler(w http.ResponseWriter, r *http.Request) { if s.backbeat != nil { s.backbeat.UpdateSearchPhase(searchID, backbeat.PhaseQuerying, 0) } - + // HTTP client with generous timeout because: // 1. N8N workflow fetches multiple files from repository // 2. LLM analysis (Ollama) can take 10-30 seconds depending on model size // 3. Network latency between services in Docker Swarm // 60 seconds provides buffer while still failing fast for real issues client := &http.Client{Timeout: 60 * time.Second} - + // Payload structure matches the N8N workflow webhook expectations. // The workflow expects these exact field names to properly route data // through the file fetching and analysis nodes. payload := map[string]interface{}{ - "repo_url": projectData.RepoURL, // Primary input for file fetching - "project_name": projectData.Name, // Used in LLM analysis context + "repo_url": projectData.RepoURL, // Primary input for file fetching + "project_name": projectData.Name, // Used in LLM analysis context } - + // JSON marshaling without error checking is acceptable here because we control // the payload structure and know it will always be valid JSON. payloadBytes, _ := json.Marshal(payload) - + // Call to configurable N8N instance for team formation workflow // The webhook URL is constructed from the base URL in configuration n8nWebhookURL := s.config.N8N.BaseURL + "/webhook/team-formation" @@ -1956,14 +2260,14 @@ func (s *Server) analyzeProjectHandler(w http.ResponseWriter, r *http.Request) { "application/json", bytes.NewBuffer(payloadBytes), ) - + // Network-level error handling (connection refused, timeout, DNS issues) if err != nil { log.Error().Err(err).Msg("Failed to trigger N8N workflow") return fmt.Errorf("failed to trigger N8N workflow: %w", err) } defer resp.Body.Close() - + // HTTP-level error handling (N8N returned an error status) if resp.StatusCode != http.StatusOK { log.Error(). @@ -1971,12 +2275,12 @@ func (s *Server) analyzeProjectHandler(w http.ResponseWriter, r *http.Request) { Msg("N8N workflow returned error") return fmt.Errorf("N8N workflow returned status %d", resp.StatusCode) } - + // Update BACKBEAT phase to ranking if s.backbeat != nil { s.backbeat.UpdateSearchPhase(searchID, backbeat.PhaseRanking, 0) } - + // Read the N8N workflow response, which contains the team formation analysis // results including detected technologies, complexity scores, and agent assignments. body, err := io.ReadAll(resp.Body) @@ -1984,16 +2288,16 @@ func (s *Server) analyzeProjectHandler(w http.ResponseWriter, r *http.Request) { log.Error().Err(err).Msg("Failed to read N8N response") return fmt.Errorf("failed to read N8N response: %w", err) } - + // Parse and return N8N response if err := json.Unmarshal(body, &analysisResult); err != nil { log.Error().Err(err).Msg("Failed to parse N8N response") return fmt.Errorf("failed to parse N8N response: %w", err) } - + return nil } - + // Execute analysis with BACKBEAT beat budget or fallback to direct execution var analysisErr error if s.backbeat != nil { @@ -2004,29 +2308,29 @@ func (s *Server) analyzeProjectHandler(w http.ResponseWriter, r *http.Request) { } else { analysisErr = analysisFunc() } - + if analysisErr != nil { render.Status(r, http.StatusInternalServerError) render.JSON(w, r, map[string]string{"error": analysisErr.Error()}) return } - + // Complete BACKBEAT search tracking if s.backbeat != nil { s.backbeat.CompleteSearch(searchID, 1) } - + log.Info(). Str("project_id", projectID). Msg("πŸ” Project analysis completed successfully with BACKBEAT tracking") - + render.JSON(w, r, analysisResult) } func (s *Server) giteaWebhookHandler(w http.ResponseWriter, r *http.Request) { ctx, span := tracing.StartWebhookSpan(r.Context(), "gitea_webhook", "gitea") defer span.End() - + // Parse webhook payload payload, err := s.webhookHandler.ParsePayload(r) if err != nil { @@ -2036,7 +2340,7 @@ func (s *Server) giteaWebhookHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "invalid payload"}) return } - + span.SetAttributes( attribute.String("webhook.action", payload.Action), attribute.String("webhook.repository", payload.Repository.FullName), @@ -2058,7 +2362,7 @@ func (s *Server) giteaWebhookHandler(w http.ResponseWriter, r *http.Request) { attribute.Bool("webhook.has_task_info", true), attribute.String("webhook.task_type", event.TaskInfo["task_type"].(string)), ) - + log.Info(). Interface("task_info", event.TaskInfo). Msg("Processing task issue") @@ -2069,7 +2373,7 @@ func (s *Server) giteaWebhookHandler(w http.ResponseWriter, r *http.Request) { } else { span.SetAttributes(attribute.Bool("webhook.has_task_info", false)) } - + span.SetAttributes( attribute.String("webhook.status", "processed"), attribute.Int64("webhook.timestamp", event.Timestamp), @@ -2100,7 +2404,7 @@ func (s *Server) handleTaskWebhook(ctx context.Context, event *gitea.WebhookEven if event.Action == "opened" || event.Action == "reopened" { taskType := event.TaskInfo["task_type"].(string) priority := event.TaskInfo["priority"].(string) - + log.Info(). Str("task_type", taskType). Str("priority", priority). @@ -2109,37 +2413,12 @@ func (s *Server) handleTaskWebhook(ctx context.Context, event *gitea.WebhookEven } // staticFileHandler serves static files from the UI directory -func (s *Server) staticFileHandler(uiDir string) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - // Extract the file path from the URL - urlPath := r.URL.Path - filePath := strings.TrimPrefix(urlPath, "/ui/") - - // Security check: prevent directory traversal - if strings.Contains(filePath, "..") { - http.Error(w, "Invalid file path", http.StatusBadRequest) - return - } - - // Construct full file path - fullPath := filepath.Join(uiDir, filePath) - - // Check if file exists - if _, err := os.Stat(fullPath); os.IsNotExist(err) { - http.NotFound(w, r) - return - } - - // Serve the file - http.ServeFile(w, r, fullPath) - } -} func (s *Server) dashboardHandler(w http.ResponseWriter, r *http.Request) { - // Serve the external index.html file - uiDir := "./ui" - indexPath := filepath.Join(uiDir, "index.html") - + // Serve the external index.html file + uiDir := resolveUIDir() + indexPath := filepath.Join(uiDir, "index.html") + // Check if the UI directory and index.html exist if _, err := os.Stat(indexPath); os.IsNotExist(err) { // Fallback to embedded HTML if external files don't exist @@ -2165,27 +2444,37 @@ func (s *Server) dashboardHandler(w http.ResponseWriter, r *http.Request) { `)) return } - + // Serve the external index.html file http.ServeFile(w, r, indexPath) } // getCurrentDir returns the current working directory for debugging func getCurrentDir() string { - dir, err := os.Getwd() - if err != nil { - return "unknown" - } - return dir + dir, err := os.Getwd() + if err != nil { + return "unknown" + } + return dir } + +// resolveUIDir determines the directory to serve the UI from. +// It uses WHOOSH_UI_DIR if set, otherwise falls back to ./ui. +func resolveUIDir() string { + if v := strings.TrimSpace(os.Getenv("WHOOSH_UI_DIR")); v != "" { + return v + } + return "./ui" +} + // backbeatStatusHandler provides real-time BACKBEAT pulse data func (s *Server) backbeatStatusHandler(w http.ResponseWriter, r *http.Request) { now := time.Now() - + // Get real BACKBEAT data if integration is available and started if s.backbeat != nil { health := s.backbeat.GetHealth() - + // Extract real BACKBEAT data currentBeat := int64(0) if beatVal, ok := health["current_beat"]; ok { @@ -2193,21 +2482,21 @@ func (s *Server) backbeatStatusHandler(w http.ResponseWriter, r *http.Request) { currentBeat = beat } } - + currentTempo := 2 // Default fallback if tempoVal, ok := health["current_tempo"]; ok { if tempo, ok := tempoVal.(int); ok { currentTempo = tempo } } - + connected := false if connVal, ok := health["connected"]; ok { if conn, ok := connVal.(bool); ok { connected = conn } } - + // Determine phase based on BACKBEAT health phase := "normal" if degradationVal, ok := health["local_degradation"]; ok { @@ -2215,27 +2504,27 @@ func (s *Server) backbeatStatusHandler(w http.ResponseWriter, r *http.Request) { phase = "degraded" } } - + // Calculate average interval based on tempo (BPM to milliseconds) averageInterval := 60000 / currentTempo // Convert BPM to milliseconds between beats - + // Determine if current beat is a downbeat (every 4th beat) isDownbeat := currentBeat%4 == 1 currentDownbeat := (currentBeat / 4) + 1 - + response := map[string]interface{}{ "current_beat": currentBeat, "current_downbeat": currentDownbeat, "average_interval": averageInterval, - "phase": phase, - "is_downbeat": isDownbeat, - "tempo": currentTempo, - "connected": connected, - "timestamp": now.Unix(), - "status": "live", - "backbeat_health": health, + "phase": phase, + "is_downbeat": isDownbeat, + "tempo": currentTempo, + "connected": connected, + "timestamp": now.Unix(), + "status": "live", + "backbeat_health": health, } - + w.Header().Set("Content-Type", "application/json") if err := json.NewEncoder(w).Encode(response); err != nil { http.Error(w, "Failed to encode response", http.StatusInternalServerError) @@ -2243,21 +2532,21 @@ func (s *Server) backbeatStatusHandler(w http.ResponseWriter, r *http.Request) { } return } - + // Fallback to basic data if BACKBEAT integration is not available response := map[string]interface{}{ "current_beat": 0, "current_downbeat": 0, "average_interval": 0, - "phase": "disconnected", - "is_downbeat": false, - "tempo": 0, - "connected": false, - "timestamp": now.Unix(), - "status": "no_backbeat", - "error": "BACKBEAT integration not available", + "phase": "disconnected", + "is_downbeat": false, + "tempo": 0, + "connected": false, + "timestamp": now.Unix(), + "status": "no_backbeat", + "error": "BACKBEAT integration not available", } - + w.Header().Set("Content-Type", "application/json") if err := json.NewEncoder(w).Encode(response); err != nil { http.Error(w, "Failed to encode response", http.StatusInternalServerError) @@ -2270,7 +2559,7 @@ func (s *Server) backbeatStatusHandler(w http.ResponseWriter, r *http.Request) { // listRepositoriesHandler returns all monitored repositories func (s *Server) listRepositoriesHandler(w http.ResponseWriter, r *http.Request) { log.Info().Msg("Listing monitored repositories") - + query := ` SELECT id, name, owner, full_name, url, clone_url, ssh_url, source_type, monitor_issues, monitor_pull_requests, enable_chorus_integration, @@ -2279,7 +2568,7 @@ func (s *Server) listRepositoriesHandler(w http.ResponseWriter, r *http.Request) closed_issues_count, total_tasks_created, created_at, updated_at FROM repositories ORDER BY created_at DESC` - + rows, err := s.db.Pool.Query(context.Background(), query) if err != nil { log.Error().Err(err).Msg("Failed to query repositories") @@ -2288,7 +2577,7 @@ func (s *Server) listRepositoriesHandler(w http.ResponseWriter, r *http.Request) return } defer rows.Close() - + repositories := []map[string]interface{}{} for rows.Next() { var id, name, owner, fullName, url, sourceType, defaultBranch, syncStatus string @@ -2298,59 +2587,59 @@ func (s *Server) listRepositoriesHandler(w http.ResponseWriter, r *http.Request) var lastSyncAt *time.Time var createdAt, updatedAt time.Time var openIssues, closedIssues, totalTasks int - + err := rows.Scan(&id, &name, &owner, &fullName, &url, &cloneURL, &sshURL, &sourceType, - &monitorIssues, &monitorPRs, &enableChorus, &description, &defaultBranch, - &isPrivate, &language, &topicsJSON, &lastSyncAt, &syncStatus, &syncError, + &monitorIssues, &monitorPRs, &enableChorus, &description, &defaultBranch, + &isPrivate, &language, &topicsJSON, &lastSyncAt, &syncStatus, &syncError, &openIssues, &closedIssues, &totalTasks, &createdAt, &updatedAt) if err != nil { log.Error().Err(err).Msg("Failed to scan repository row") continue } - + // Parse topics from JSONB var topics []string if err := json.Unmarshal(topicsJSON, &topics); err != nil { log.Error().Err(err).Msg("Failed to unmarshal topics") topics = []string{} // Default to empty slice } - + // Handle nullable lastSyncAt var lastSyncFormatted *string if lastSyncAt != nil { formatted := lastSyncAt.Format(time.RFC3339) lastSyncFormatted = &formatted } - + repo := map[string]interface{}{ - "id": id, - "name": name, - "owner": owner, - "full_name": fullName, - "url": url, - "clone_url": cloneURL, - "ssh_url": sshURL, - "source_type": sourceType, - "monitor_issues": monitorIssues, - "monitor_pull_requests": monitorPRs, + "id": id, + "name": name, + "owner": owner, + "full_name": fullName, + "url": url, + "clone_url": cloneURL, + "ssh_url": sshURL, + "source_type": sourceType, + "monitor_issues": monitorIssues, + "monitor_pull_requests": monitorPRs, "enable_chorus_integration": enableChorus, - "description": description, - "default_branch": defaultBranch, - "is_private": isPrivate, - "language": language, - "topics": topics, - "last_sync_at": lastSyncFormatted, - "sync_status": syncStatus, - "sync_error": syncError, - "open_issues_count": openIssues, - "closed_issues_count": closedIssues, - "total_tasks_created": totalTasks, - "created_at": createdAt.Format(time.RFC3339), - "updated_at": updatedAt.Format(time.RFC3339), + "description": description, + "default_branch": defaultBranch, + "is_private": isPrivate, + "language": language, + "topics": topics, + "last_sync_at": lastSyncFormatted, + "sync_status": syncStatus, + "sync_error": syncError, + "open_issues_count": openIssues, + "closed_issues_count": closedIssues, + "total_tasks_created": totalTasks, + "created_at": createdAt.Format(time.RFC3339), + "updated_at": updatedAt.Format(time.RFC3339), } repositories = append(repositories, repo) } - + render.JSON(w, r, map[string]interface{}{ "repositories": repositories, "count": len(repositories), @@ -2363,30 +2652,30 @@ func (s *Server) createRepositoryHandler(w http.ResponseWriter, r *http.Request) Name string `json:"name"` Owner string `json:"owner"` URL string `json:"url"` - SourceType string `json:"source_type"` - MonitorIssues bool `json:"monitor_issues"` - MonitorPullRequests bool `json:"monitor_pull_requests"` + SourceType string `json:"source_type"` + MonitorIssues bool `json:"monitor_issues"` + MonitorPullRequests bool `json:"monitor_pull_requests"` EnableChorusIntegration bool `json:"enable_chorus_integration"` - Description *string `json:"description"` - DefaultBranch string `json:"default_branch"` - IsPrivate bool `json:"is_private"` - Language *string `json:"language"` - Topics []string `json:"topics"` + Description *string `json:"description"` + DefaultBranch string `json:"default_branch"` + IsPrivate bool `json:"is_private"` + Language *string `json:"language"` + Topics []string `json:"topics"` } - + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "invalid request body"}) return } - + // Validate required fields if req.Name == "" || req.Owner == "" || req.URL == "" { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "name, owner, and url are required"}) return } - + // Set defaults if req.SourceType == "" { req.SourceType = "gitea" @@ -2397,14 +2686,14 @@ func (s *Server) createRepositoryHandler(w http.ResponseWriter, r *http.Request) if req.Topics == nil { req.Topics = []string{} } - + fullName := req.Owner + "/" + req.Name - + log.Info(). Str("repository", fullName). Str("url", req.URL). Msg("Creating new repository monitor") - + query := ` INSERT INTO repositories ( name, owner, full_name, url, source_type, monitor_issues, @@ -2412,7 +2701,7 @@ func (s *Server) createRepositoryHandler(w http.ResponseWriter, r *http.Request) default_branch, is_private, language, topics ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13) RETURNING id, created_at` - + // Convert topics slice to JSON for JSONB column topicsJSON, err := json.Marshal(req.Topics) if err != nil { @@ -2429,14 +2718,14 @@ func (s *Server) createRepositoryHandler(w http.ResponseWriter, r *http.Request) req.MonitorIssues, req.MonitorPullRequests, req.EnableChorusIntegration, req.Description, req.DefaultBranch, req.IsPrivate, req.Language, topicsJSON). Scan(&id, &createdAt) - + if err != nil { log.Error().Err(err).Msg("Failed to create repository") render.Status(r, http.StatusInternalServerError) render.JSON(w, r, map[string]string{"error": "failed to create repository"}) return } - + // Automatically create required labels in the Gitea repository // @goal: WHOOSH-LABELS-004 - Automatic label creation on repository addition // WHY: Ensures standardized ecosystem labels are available immediately for issue categorization @@ -2459,7 +2748,7 @@ func (s *Server) createRepositoryHandler(w http.ResponseWriter, r *http.Request) Msg("Successfully created required labels in Gitea repository") } } - + render.Status(r, http.StatusCreated) render.JSON(w, r, map[string]interface{}{ "id": id, @@ -2472,9 +2761,9 @@ func (s *Server) createRepositoryHandler(w http.ResponseWriter, r *http.Request) // getRepositoryHandler returns a specific repository func (s *Server) getRepositoryHandler(w http.ResponseWriter, r *http.Request) { repoID := chi.URLParam(r, "repoID") - + log.Info().Str("repository_id", repoID).Msg("Getting repository details") - + query := ` SELECT id, name, owner, full_name, url, clone_url, ssh_url, source_type, source_config, monitor_issues, monitor_pull_requests, monitor_releases, @@ -2484,7 +2773,7 @@ func (s *Server) getRepositoryHandler(w http.ResponseWriter, r *http.Request) { open_issues_count, closed_issues_count, total_tasks_created, created_at, updated_at FROM repositories WHERE id = $1` - + var repo struct { ID string `json:"id"` Name string `json:"name"` @@ -2495,28 +2784,28 @@ func (s *Server) getRepositoryHandler(w http.ResponseWriter, r *http.Request) { SSHURL *string `json:"ssh_url"` SourceType string `json:"source_type"` SourceConfig []byte `json:"source_config"` - MonitorIssues bool `json:"monitor_issues"` - MonitorPullRequests bool `json:"monitor_pull_requests"` - MonitorReleases bool `json:"monitor_releases"` + MonitorIssues bool `json:"monitor_issues"` + MonitorPullRequests bool `json:"monitor_pull_requests"` + MonitorReleases bool `json:"monitor_releases"` EnableChorusIntegration bool `json:"enable_chorus_integration"` - ChorusTaskLabels []string `json:"chorus_task_labels"` - AutoAssignTeams bool `json:"auto_assign_teams"` - Description *string `json:"description"` - DefaultBranch string `json:"default_branch"` - IsPrivate bool `json:"is_private"` - Language *string `json:"language"` - Topics []string `json:"topics"` - LastSyncAt *time.Time `json:"last_sync_at"` - LastIssueSyncAt *time.Time `json:"last_issue_sync"` - SyncStatus string `json:"sync_status"` - SyncError *string `json:"sync_error"` - OpenIssuesCount int `json:"open_issues_count"` - ClosedIssuesCount int `json:"closed_issues_count"` - TotalTasksCreated int `json:"total_tasks_created"` - CreatedAt time.Time `json:"created_at"` - UpdatedAt time.Time `json:"updated_at"` + ChorusTaskLabels []string `json:"chorus_task_labels"` + AutoAssignTeams bool `json:"auto_assign_teams"` + Description *string `json:"description"` + DefaultBranch string `json:"default_branch"` + IsPrivate bool `json:"is_private"` + Language *string `json:"language"` + Topics []string `json:"topics"` + LastSyncAt *time.Time `json:"last_sync_at"` + LastIssueSyncAt *time.Time `json:"last_issue_sync"` + SyncStatus string `json:"sync_status"` + SyncError *string `json:"sync_error"` + OpenIssuesCount int `json:"open_issues_count"` + ClosedIssuesCount int `json:"closed_issues_count"` + TotalTasksCreated int `json:"total_tasks_created"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` } - + err := s.db.Pool.QueryRow(context.Background(), query, repoID).Scan( &repo.ID, &repo.Name, &repo.Owner, &repo.FullName, &repo.URL, &repo.CloneURL, &repo.SSHURL, &repo.SourceType, &repo.SourceConfig, @@ -2526,7 +2815,7 @@ func (s *Server) getRepositoryHandler(w http.ResponseWriter, r *http.Request) { &repo.Topics, &repo.LastSyncAt, &repo.LastIssueSyncAt, &repo.SyncStatus, &repo.SyncError, &repo.OpenIssuesCount, &repo.ClosedIssuesCount, &repo.TotalTasksCreated, &repo.CreatedAt, &repo.UpdatedAt) - + if err != nil { if err.Error() == "no rows in result set" { render.Status(r, http.StatusNotFound) @@ -2538,39 +2827,39 @@ func (s *Server) getRepositoryHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "failed to get repository"}) return } - + render.JSON(w, r, repo) } // updateRepositoryHandler updates repository settings func (s *Server) updateRepositoryHandler(w http.ResponseWriter, r *http.Request) { repoID := chi.URLParam(r, "repoID") - + var req struct { - MonitorIssues *bool `json:"monitor_issues"` - MonitorPullRequests *bool `json:"monitor_pull_requests"` - MonitorReleases *bool `json:"monitor_releases"` - EnableChorusIntegration *bool `json:"enable_chorus_integration"` - AutoAssignTeams *bool `json:"auto_assign_teams"` - Description *string `json:"description"` - DefaultBranch *string `json:"default_branch"` - Language *string `json:"language"` - Topics []string `json:"topics"` + MonitorIssues *bool `json:"monitor_issues"` + MonitorPullRequests *bool `json:"monitor_pull_requests"` + MonitorReleases *bool `json:"monitor_releases"` + EnableChorusIntegration *bool `json:"enable_chorus_integration"` + AutoAssignTeams *bool `json:"auto_assign_teams"` + Description *string `json:"description"` + DefaultBranch *string `json:"default_branch"` + Language *string `json:"language"` + Topics []string `json:"topics"` } - + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "invalid request body"}) return } - + log.Info().Str("repository_id", repoID).Msg("Updating repository settings") - + // Build dynamic update query updates := []string{} args := []interface{}{repoID} argIndex := 2 - + if req.MonitorIssues != nil { updates = append(updates, fmt.Sprintf("monitor_issues = $%d", argIndex)) args = append(args, *req.MonitorIssues) @@ -2616,18 +2905,18 @@ func (s *Server) updateRepositoryHandler(w http.ResponseWriter, r *http.Request) args = append(args, req.Topics) argIndex++ } - + if len(updates) == 0 { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "no fields to update"}) return } - + updates = append(updates, fmt.Sprintf("updated_at = $%d", argIndex)) args = append(args, time.Now()) - + query := fmt.Sprintf("UPDATE repositories SET %s WHERE id = $1", strings.Join(updates, ", ")) - + _, err := s.db.Pool.Exec(context.Background(), query, args...) if err != nil { log.Error().Err(err).Msg("Failed to update repository") @@ -2635,16 +2924,16 @@ func (s *Server) updateRepositoryHandler(w http.ResponseWriter, r *http.Request) render.JSON(w, r, map[string]string{"error": "failed to update repository"}) return } - + render.JSON(w, r, map[string]string{"message": "Repository updated successfully"}) } // deleteRepositoryHandler removes a repository from monitoring func (s *Server) deleteRepositoryHandler(w http.ResponseWriter, r *http.Request) { repoID := chi.URLParam(r, "repoID") - + log.Info().Str("repository_id", repoID).Msg("Deleting repository monitor") - + query := "DELETE FROM repositories WHERE id = $1" result, err := s.db.Pool.Exec(context.Background(), query, repoID) if err != nil { @@ -2653,33 +2942,33 @@ func (s *Server) deleteRepositoryHandler(w http.ResponseWriter, r *http.Request) render.JSON(w, r, map[string]string{"error": "failed to delete repository"}) return } - + if result.RowsAffected() == 0 { render.Status(r, http.StatusNotFound) render.JSON(w, r, map[string]string{"error": "repository not found"}) return } - + render.JSON(w, r, map[string]string{"message": "Repository deleted successfully"}) } // syncRepositoryHandler triggers a manual sync of repository issues func (s *Server) syncRepositoryHandler(w http.ResponseWriter, r *http.Request) { repoID := chi.URLParam(r, "repoID") - + log.Info().Str("repository_id", repoID).Msg("Manual repository sync triggered") - + if s.repoMonitor == nil { render.Status(r, http.StatusServiceUnavailable) render.JSON(w, r, map[string]string{"error": "repository monitoring service not available"}) return } - + // Trigger repository sync in background go func() { ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) defer cancel() - + if err := s.repoMonitor.SyncRepository(ctx, repoID); err != nil { log.Error(). Err(err). @@ -2687,7 +2976,7 @@ func (s *Server) syncRepositoryHandler(w http.ResponseWriter, r *http.Request) { Msg("Manual repository sync failed") } }() - + render.JSON(w, r, map[string]interface{}{ "message": "Repository sync triggered", "repository_id": repoID, @@ -2698,15 +2987,15 @@ func (s *Server) syncRepositoryHandler(w http.ResponseWriter, r *http.Request) { // ensureRepositoryLabelsHandler ensures required labels exist in the Gitea repository func (s *Server) ensureRepositoryLabelsHandler(w http.ResponseWriter, r *http.Request) { repoID := chi.URLParam(r, "repoID") - + log.Info().Str("repository_id", repoID).Msg("Ensuring repository labels") - + if s.repoMonitor == nil || s.repoMonitor.GetGiteaClient() == nil { render.Status(r, http.StatusServiceUnavailable) render.JSON(w, r, map[string]string{"error": "repository monitoring service not available"}) return } - + // Get repository details first query := "SELECT owner, name FROM repositories WHERE id = $1" var owner, name string @@ -2722,7 +3011,7 @@ func (s *Server) ensureRepositoryLabelsHandler(w http.ResponseWriter, r *http.Re render.JSON(w, r, map[string]string{"error": "failed to get repository"}) return } - + // @goal: WHOOSH-LABELS-004 - Manual label synchronization endpoint // WHY: Allows updating existing repositories to standardized label set err = s.repoMonitor.GetGiteaClient().EnsureRequiredLabels(context.Background(), owner, name) @@ -2737,13 +3026,13 @@ func (s *Server) ensureRepositoryLabelsHandler(w http.ResponseWriter, r *http.Re render.JSON(w, r, map[string]string{"error": "failed to create labels: " + err.Error()}) return } - + log.Info(). Str("repository_id", repoID). Str("owner", owner). Str("name", name). Msg("Successfully ensured repository labels") - + render.JSON(w, r, map[string]interface{}{ "message": "Repository labels ensured successfully", "repository_id": repoID, @@ -2756,15 +3045,15 @@ func (s *Server) ensureRepositoryLabelsHandler(w http.ResponseWriter, r *http.Re func (s *Server) getRepositorySyncLogsHandler(w http.ResponseWriter, r *http.Request) { repoID := chi.URLParam(r, "repoID") limit := 50 - + if limitParam := r.URL.Query().Get("limit"); limitParam != "" { if l, err := strconv.Atoi(limitParam); err == nil && l > 0 && l <= 1000 { limit = l } } - + log.Info().Str("repository_id", repoID).Int("limit", limit).Msg("Getting repository sync logs") - + query := ` SELECT id, sync_type, operation, status, message, error_details, items_processed, items_created, items_updated, duration_ms, @@ -2773,7 +3062,7 @@ func (s *Server) getRepositorySyncLogsHandler(w http.ResponseWriter, r *http.Req WHERE repository_id = $1 ORDER BY created_at DESC LIMIT $2` - + rows, err := s.db.Pool.Query(context.Background(), query, repoID, limit) if err != nil { log.Error().Err(err).Msg("Failed to query sync logs") @@ -2782,7 +3071,7 @@ func (s *Server) getRepositorySyncLogsHandler(w http.ResponseWriter, r *http.Req return } defer rows.Close() - + logs := []map[string]interface{}{} for rows.Next() { var id, syncType, operation, status, message string @@ -2790,7 +3079,7 @@ func (s *Server) getRepositorySyncLogsHandler(w http.ResponseWriter, r *http.Req var itemsProcessed, itemsCreated, itemsUpdated, durationMs int var externalID, externalURL *string var createdAt time.Time - + err := rows.Scan(&id, &syncType, &operation, &status, &message, &errorDetails, &itemsProcessed, &itemsCreated, &itemsUpdated, &durationMs, &externalID, &externalURL, &createdAt) @@ -2798,25 +3087,25 @@ func (s *Server) getRepositorySyncLogsHandler(w http.ResponseWriter, r *http.Req log.Error().Err(err).Msg("Failed to scan sync log row") continue } - + logEntry := map[string]interface{}{ - "id": id, - "sync_type": syncType, - "operation": operation, - "status": status, - "message": message, - "error_details": string(errorDetails), - "items_processed": itemsProcessed, - "items_created": itemsCreated, - "items_updated": itemsUpdated, - "duration_ms": durationMs, - "external_id": externalID, - "external_url": externalURL, - "created_at": createdAt.Format(time.RFC3339), + "id": id, + "sync_type": syncType, + "operation": operation, + "status": status, + "message": message, + "error_details": string(errorDetails), + "items_processed": itemsProcessed, + "items_created": itemsCreated, + "items_updated": itemsUpdated, + "duration_ms": durationMs, + "external_id": externalID, + "external_url": externalURL, + "created_at": createdAt.Format(time.RFC3339), } logs = append(logs, logEntry) } - + render.JSON(w, r, map[string]interface{}{ "logs": logs, "count": len(logs), @@ -2825,6 +3114,68 @@ func (s *Server) getRepositorySyncLogsHandler(w http.ResponseWriter, r *http.Req // Council management handlers +func (s *Server) listCouncilsHandler(w http.ResponseWriter, r *http.Request) { + // Query all councils with basic info + query := ` + SELECT c.id, c.project_name, c.repository, c.status, c.created_at, + COUNT(DISTINCT ca.id) as agent_count, + COUNT(DISTINCT car.id) as artifacts_count + FROM councils c + LEFT JOIN council_agents ca ON c.id = ca.council_id + LEFT JOIN council_artifacts car ON c.id = car.council_id + GROUP BY c.id, c.project_name, c.repository, c.status, c.created_at + ORDER BY c.created_at DESC + LIMIT 100 + ` + + rows, err := s.db.Pool.Query(r.Context(), query) + if err != nil { + log.Error().Err(err).Msg("Failed to query councils") + render.Status(r, http.StatusInternalServerError) + render.JSON(w, r, map[string]string{"error": "failed to retrieve councils"}) + return + } + defer rows.Close() + + var councils []map[string]interface{} + for rows.Next() { + var id uuid.UUID + var projectName, repository, status string + var createdAt time.Time + var agentCount, artifactsCount int + + err := rows.Scan(&id, &projectName, &repository, &status, &createdAt, &agentCount, &artifactsCount) + if err != nil { + log.Error().Err(err).Msg("Failed to scan council row") + continue + } + + council := map[string]interface{}{ + "id": id, + "project_name": projectName, + "repository": repository, + "status": status, + "created_at": createdAt.Format(time.RFC3339), + "agent_count": agentCount, + "artifacts_count": artifactsCount, + } + + councils = append(councils, council) + } + + if err = rows.Err(); err != nil { + log.Error().Err(err).Msg("Error iterating council rows") + render.Status(r, http.StatusInternalServerError) + render.JSON(w, r, map[string]string{"error": "failed to process councils"}) + return + } + + render.JSON(w, r, map[string]interface{}{ + "councils": councils, + "total": len(councils), + }) +} + func (s *Server) getCouncilHandler(w http.ResponseWriter, r *http.Request) { councilIDStr := chi.URLParam(r, "councilID") councilID, err := uuid.Parse(councilIDStr) @@ -2833,7 +3184,7 @@ func (s *Server) getCouncilHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "invalid council ID"}) return } - + composition, err := s.councilComposer.GetCouncilComposition(r.Context(), councilID) if err != nil { if strings.Contains(err.Error(), "no rows in result set") { @@ -2841,13 +3192,13 @@ func (s *Server) getCouncilHandler(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]string{"error": "council not found"}) return } - + log.Error().Err(err).Str("council_id", councilIDStr).Msg("Failed to get council composition") render.Status(r, http.StatusInternalServerError) render.JSON(w, r, map[string]string{"error": "failed to retrieve council"}) return } - + render.JSON(w, r, composition) } @@ -2859,7 +3210,7 @@ func (s *Server) getCouncilArtifactsHandler(w http.ResponseWriter, r *http.Reque render.JSON(w, r, map[string]string{"error": "invalid council ID"}) return } - + // Query all artifacts for this council query := ` SELECT id, artifact_type, artifact_name, content, content_json, produced_at, produced_by, status @@ -2867,7 +3218,7 @@ func (s *Server) getCouncilArtifactsHandler(w http.ResponseWriter, r *http.Reque WHERE council_id = $1 ORDER BY produced_at DESC ` - + rows, err := s.db.Pool.Query(r.Context(), query, councilID) if err != nil { log.Error().Err(err).Str("council_id", councilIDStr).Msg("Failed to query council artifacts") @@ -2876,7 +3227,7 @@ func (s *Server) getCouncilArtifactsHandler(w http.ResponseWriter, r *http.Reque return } defer rows.Close() - + var artifacts []map[string]interface{} for rows.Next() { var id uuid.UUID @@ -2885,13 +3236,13 @@ func (s *Server) getCouncilArtifactsHandler(w http.ResponseWriter, r *http.Reque var contentJSON []byte var producedAt time.Time var producedBy *string - + err := rows.Scan(&id, &artifactType, &artifactName, &content, &contentJSON, &producedAt, &producedBy, &status) if err != nil { log.Error().Err(err).Msg("Failed to scan artifact row") continue } - + artifact := map[string]interface{}{ "id": id, "artifact_type": artifactType, @@ -2901,7 +3252,7 @@ func (s *Server) getCouncilArtifactsHandler(w http.ResponseWriter, r *http.Reque "produced_by": producedBy, "status": status, } - + // Parse JSON content if available if contentJSON != nil { var jsonData interface{} @@ -2909,17 +3260,17 @@ func (s *Server) getCouncilArtifactsHandler(w http.ResponseWriter, r *http.Reque artifact["content_json"] = jsonData } } - + artifacts = append(artifacts, artifact) } - + if err = rows.Err(); err != nil { log.Error().Err(err).Msg("Error iterating artifact rows") render.Status(r, http.StatusInternalServerError) render.JSON(w, r, map[string]string{"error": "failed to process artifacts"}) return } - + render.JSON(w, r, map[string]interface{}{ "council_id": councilID, "artifacts": artifacts, @@ -2935,7 +3286,7 @@ func (s *Server) createCouncilArtifactHandler(w http.ResponseWriter, r *http.Req render.JSON(w, r, map[string]string{"error": "invalid council ID"}) return } - + var req struct { ArtifactType string `json:"artifact_type"` ArtifactName string `json:"artifact_name"` @@ -2944,25 +3295,25 @@ func (s *Server) createCouncilArtifactHandler(w http.ResponseWriter, r *http.Req ProducedBy *string `json:"produced_by,omitempty"` Status *string `json:"status,omitempty"` } - + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "invalid JSON body"}) return } - + if req.ArtifactType == "" || req.ArtifactName == "" { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "artifact_type and artifact_name are required"}) return } - + // Set default status if not provided status := "draft" if req.Status != nil { status = *req.Status } - + // Validate artifact type (based on the constraint in the migration) validTypes := map[string]bool{ "kickoff_manifest": true, @@ -2974,13 +3325,13 @@ func (s *Server) createCouncilArtifactHandler(w http.ResponseWriter, r *http.Req "shhh_policy": true, "ucxl_root": true, } - + if !validTypes[req.ArtifactType] { render.Status(r, http.StatusBadRequest) render.JSON(w, r, map[string]string{"error": "invalid artifact_type"}) return } - + // Prepare JSON content var contentJSONBytes []byte if req.ContentJSON != nil { @@ -2991,27 +3342,27 @@ func (s *Server) createCouncilArtifactHandler(w http.ResponseWriter, r *http.Req return } } - + // Insert artifact insertQuery := ` INSERT INTO council_artifacts (council_id, artifact_type, artifact_name, content, content_json, produced_by, status) VALUES ($1, $2, $3, $4, $5, $6, $7) RETURNING id, produced_at ` - + var artifactID uuid.UUID var producedAt time.Time - - err = s.db.Pool.QueryRow(r.Context(), insertQuery, councilID, req.ArtifactType, req.ArtifactName, + + err = s.db.Pool.QueryRow(r.Context(), insertQuery, councilID, req.ArtifactType, req.ArtifactName, req.Content, contentJSONBytes, req.ProducedBy, status).Scan(&artifactID, &producedAt) - + if err != nil { log.Error().Err(err).Str("council_id", councilIDStr).Msg("Failed to create council artifact") render.Status(r, http.StatusInternalServerError) render.JSON(w, r, map[string]string{"error": "failed to create artifact"}) return } - + response := map[string]interface{}{ "id": artifactID, "council_id": councilID, @@ -3023,7 +3374,7 @@ func (s *Server) createCouncilArtifactHandler(w http.ResponseWriter, r *http.Req "status": status, "produced_at": producedAt.Format(time.RFC3339), } - + render.Status(r, http.StatusCreated) render.JSON(w, r, response) } @@ -3043,7 +3394,7 @@ func (s *Server) lookupProjectData(ctx context.Context, projectID string, projec WHERE name = $1 OR full_name LIKE '%/' || $1 LIMIT 1 ` - + var name, url string err := s.db.Pool.QueryRow(ctx, query, projectID).Scan(&name, &url) if err != nil { @@ -3053,17 +3404,17 @@ func (s *Server) lookupProjectData(ctx context.Context, projectID string, projec log.Error().Err(err).Str("project_id", projectID).Msg("Failed to query repository") return fmt.Errorf("database error: %w", err) } - + // Populate the project data projectData.Name = name projectData.RepoURL = url - + log.Info(). Str("project_id", projectID). Str("name", name). Str("repo_url", url). Msg("Found project data in repositories table") - + return nil } @@ -3086,25 +3437,25 @@ func (s *Server) inferTechStackFromLabels(labels []string) []string { "frontend": true, "database": true, } - + var techStack []string for _, label := range labels { if techMap[strings.ToLower(label)] { techStack = append(techStack, strings.ToLower(label)) } } - + return techStack } // processTaskAsync handles complex task processing in background func (s *Server) processTaskAsync(taskID string, taskInput *composer.TaskAnalysisInput) { ctx := context.Background() - + log.Info(). Str("task_id", taskID). Msg("Starting async task processing") - + result, err := s.teamComposer.AnalyzeAndComposeTeam(ctx, taskInput) if err != nil { log.Error().Err(err). @@ -3112,7 +3463,7 @@ func (s *Server) processTaskAsync(taskID string, taskInput *composer.TaskAnalysi Msg("Async task analysis failed") return } - + team, err := s.teamComposer.CreateTeam(ctx, result.TeamComposition, taskInput) if err != nil { log.Error().Err(err). @@ -3120,13 +3471,881 @@ func (s *Server) processTaskAsync(taskID string, taskInput *composer.TaskAnalysi Msg("Async team creation failed") return } - + log.Info(). Str("task_id", taskID). Str("team_id", team.ID.String()). Float64("confidence", result.TeamComposition.ConfidenceScore). Msg("Async task processing completed") - + // In production, this would update task status in database // and potentially notify clients via websockets or webhooks -} \ No newline at end of file +} + +func (s *Server) startCouncilRebroadcastMonitor(opportunity *p2p.CouncilOpportunity) { + s.rebroadcastMu.Lock() + if cancel, ok := s.activeBroadcasts[opportunity.CouncilID]; ok { + delete(s.activeBroadcasts, opportunity.CouncilID) + cancel() + } + ctx, cancel := context.WithCancel(context.Background()) + s.activeBroadcasts[opportunity.CouncilID] = cancel + s.rebroadcastMu.Unlock() + + go s.councilRebroadcastLoop(ctx, opportunity) +} + +func (s *Server) stopCouncilRebroadcast(councilID uuid.UUID) { + s.rebroadcastMu.Lock() + cancel, ok := s.activeBroadcasts[councilID] + if ok { + delete(s.activeBroadcasts, councilID) + } + s.rebroadcastMu.Unlock() + + if ok { + cancel() + } +} + +func (s *Server) clearCouncilBroadcast(councilID uuid.UUID) { + s.rebroadcastMu.Lock() + delete(s.activeBroadcasts, councilID) + s.rebroadcastMu.Unlock() +} + +func (s *Server) councilRebroadcastLoop(ctx context.Context, opportunity *p2p.CouncilOpportunity) { + defer s.clearCouncilBroadcast(opportunity.CouncilID) + + interval := 10 * time.Second + maxInterval := 2 * time.Minute + + for { + select { + case <-ctx.Done(): + log.Info().Str("council_id", opportunity.CouncilID.String()).Msg("πŸ›‘ Stopping council rebroadcast monitor") + return + case <-time.After(interval): + } + + pending, err := s.hasUnfilledCoreRoles(ctx, opportunity.CouncilID) + if err != nil { + log.Warn().Err(err).Str("council_id", opportunity.CouncilID.String()).Msg("Failed to evaluate council staffing status") + continue + } + + if !pending { + log.Info().Str("council_id", opportunity.CouncilID.String()).Msg("🎯 Council fully staffed; stopping rebroadcasts") + return + } + + log.Info(). + Str("council_id", opportunity.CouncilID.String()). + Dur("interval", interval). + Msg("πŸ“‘ Re-broadcasting council opportunity to fill remaining roles") + + broadcastCtx, cancel := context.WithTimeout(ctx, 20*time.Second) + if err := s.p2pBroadcaster.BroadcastCouncilOpportunity(broadcastCtx, opportunity); err != nil { + log.Warn().Err(err).Str("council_id", opportunity.CouncilID.String()).Msg("Council rebroadcast failed") + } + cancel() + + if interval < maxInterval { + interval *= 2 + if interval > maxInterval { + interval = maxInterval + } + } + } +} + +func (s *Server) hasUnfilledCoreRoles(ctx context.Context, councilID uuid.UUID) (bool, error) { + query := ` + SELECT COUNT(*) + FROM council_agents + WHERE council_id = $1 + AND required = true + AND (deployed = false OR status NOT IN ('assigned', 'active')) + ` + + var remaining int + if err := s.db.Pool.QueryRow(ctx, query, councilID).Scan(&remaining); err != nil { + return false, err + } + + return remaining > 0, nil +} + +// handleCouncilRoleClaim handles POST /api/v1/councils/{councilID}/claims +// This endpoint receives role claim requests from CHORUS agents +func (s *Server) handleCouncilRoleClaim(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + councilID := chi.URLParam(r, "councilID") + if councilID == "" { + render.Status(r, http.StatusBadRequest) + render.JSON(w, r, map[string]string{"error": "council_id is required"}) + return + } + + // Parse UUID + councilUUID, err := uuid.Parse(councilID) + if err != nil { + render.Status(r, http.StatusBadRequest) + render.JSON(w, r, map[string]string{"error": "invalid council_id format"}) + return + } + + // Parse claim request + var claim struct { + AgentID string `json:"agent_id"` + AgentName string `json:"agent_name"` + RoleName string `json:"role_name"` + Capabilities []string `json:"capabilities"` + Confidence float64 `json:"confidence"` + Reasoning string `json:"reasoning"` + Endpoint string `json:"endpoint"` + P2PAddr string `json:"p2p_addr"` + } + + if err := s.validator.DecodeAndValidateJSON(r, &claim); err != nil { + render.Status(r, http.StatusBadRequest) + render.JSON(w, r, map[string]string{"error": "invalid JSON payload"}) + return + } + + log.Info(). + Str("council_id", councilID). + Str("agent_id", claim.AgentID). + Str("role_name", claim.RoleName). + Float64("confidence", claim.Confidence). + Msg("πŸ€– Agent claiming council role") + + // Verify council exists and get current state + councilQuery := ` + SELECT id, project_name, status, project_brief, repository, external_url, issue_id, brief_dispatched_at, brief_owner_role + FROM councils + WHERE id = $1 + ` + + var existingCouncil struct { + ID uuid.UUID + ProjectName string + Status string + ProjectBrief string + Repository string + ExternalURL sql.NullString + IssueID sql.NullInt64 + BriefDispatchedAt sql.NullTime + BriefOwnerRole sql.NullString + } + + err = s.db.Pool.QueryRow(ctx, councilQuery, councilUUID).Scan( + &existingCouncil.ID, + &existingCouncil.ProjectName, + &existingCouncil.Status, + &existingCouncil.ProjectBrief, + &existingCouncil.Repository, + &existingCouncil.ExternalURL, + &existingCouncil.IssueID, + &existingCouncil.BriefDispatchedAt, + &existingCouncil.BriefOwnerRole, + ) + + if err != nil { + log.Error(). + Err(err). + Str("council_id", councilID). + Msg("Council not found") + render.Status(r, http.StatusNotFound) + render.JSON(w, r, map[string]string{"error": "council not found"}) + return + } + + // Check if council is still forming (not already active/completed) + if existingCouncil.Status != "forming" && existingCouncil.Status != "active" { + render.Status(r, http.StatusConflict) + render.JSON(w, r, map[string]string{"error": fmt.Sprintf("council is %s, not accepting claims", existingCouncil.Status)}) + return + } + + // Check if role exists and is unclaimed + roleCheckQuery := ` + SELECT agent_id, agent_name, role_name, required, deployed, status, persona_status, endpoint_url + FROM council_agents + WHERE council_id = $1 AND role_name = $2 + ` + + var existingRole struct { + AgentID string + AgentName string + RoleName string + Required bool + Deployed bool + Status string + PersonaStatus string + EndpointURL sql.NullString + } + + err = s.db.Pool.QueryRow(ctx, roleCheckQuery, councilUUID, claim.RoleName).Scan( + &existingRole.AgentID, + &existingRole.AgentName, + &existingRole.RoleName, + &existingRole.Required, + &existingRole.Deployed, + &existingRole.Status, + &existingRole.PersonaStatus, + &existingRole.EndpointURL, + ) + + if err != nil { + log.Error(). + Err(err). + Str("role_name", claim.RoleName). + Msg("Role not found in council") + render.Status(r, http.StatusNotFound) + render.JSON(w, r, map[string]string{"error": "role not found in council"}) + return + } + + // Check if role is already claimed + if existingRole.Deployed || existingRole.Status == "assigned" || existingRole.Status == "active" { + log.Warn(). + Str("role_name", claim.RoleName). + Str("current_agent", existingRole.AgentID). + Msg("Role already claimed by another agent") + render.Status(r, http.StatusConflict) + render.JSON(w, r, map[string]string{"error": "role already claimed"}) + return + } + + // Assign role to agent + updateQuery := ` + UPDATE council_agents + SET + deployed = true, + status = 'assigned', + service_id = $1, + endpoint_url = $2, + persona_status = 'pending', + persona_loaded_at = NULL, + deployed_at = NOW(), + updated_at = NOW() + WHERE council_id = $3 AND role_name = $4 + ` + + retry := false + _, err = s.db.Pool.Exec(ctx, updateQuery, claim.AgentID, claim.Endpoint, councilUUID, claim.RoleName) + if err != nil { + if pgErr, ok := err.(*pgconn.PgError); ok && pgErr.Code == "23514" { + retry = true + log.Warn(). + Str("council_id", councilID). + Str("role_name", claim.RoleName). + Str("agent_id", claim.AgentID). + Msg("Council role assignment hit legacy status constraint – attempting remediation") + + if ensureErr := s.ensureCouncilAgentStatusConstraint(ctx); ensureErr != nil { + log.Error(). + Err(ensureErr). + Str("council_id", councilID). + Msg("Failed to reconcile council agent status constraint") + // keep original error to return below + } else { + _, err = s.db.Pool.Exec(ctx, updateQuery, claim.AgentID, claim.Endpoint, councilUUID, claim.RoleName) + } + } + } + + if err != nil { + log.Error(). + Err(err). + Str("council_id", councilID). + Str("role_name", claim.RoleName). + Msg("Failed to assign role to agent") + render.Status(r, http.StatusInternalServerError) + render.JSON(w, r, map[string]string{"error": "failed to assign role"}) + return + } + + if retry { + log.Info(). + Str("council_id", councilID). + Str("role_name", claim.RoleName). + Msg("Council agent status constraint updated to allow 'assigned'") + } + + log.Info(). + Str("council_id", councilID). + Str("agent_id", claim.AgentID). + Str("role_name", claim.RoleName). + Msg("βœ… Successfully assigned council role to agent") + + roleProfile := s.lookupRoleProfile(claim.RoleName, existingRole.AgentName) + ucxlCouncilAddress := fmt.Sprintf("ucxl://team:council@project:%s:council/councils/%s", sanitizeUCXLIdentifier(existingCouncil.ProjectName), councilID) + + briefSummary := map[string]interface{}{ + "project_name": existingCouncil.ProjectName, + "repository": existingCouncil.Repository, + "ucxl_address": ucxlCouncilAddress, + } + + if trimmed := strings.TrimSpace(existingCouncil.ProjectBrief); trimmed != "" { + briefSummary["summary"] = truncateString(trimmed, 2000) + } + + if existingCouncil.ExternalURL.Valid { + briefSummary["external_url"] = existingCouncil.ExternalURL.String + } + + if existingCouncil.IssueID.Valid { + briefSummary["issue_id"] = existingCouncil.IssueID.Int64 + } + + if existingCouncil.BriefOwnerRole.Valid { + briefSummary["brief_owner_role"] = existingCouncil.BriefOwnerRole.String + } + + // Check if all core roles are now claimed + roleCountsQuery := ` + SELECT + COUNT(*) FILTER (WHERE required = true) AS total_core, + COUNT(*) FILTER (WHERE required = true AND deployed = true AND status IN ('assigned', 'active')) AS claimed_core, + COUNT(*) FILTER (WHERE required = false) AS total_optional, + COUNT(*) FILTER (WHERE required = false AND deployed = true AND status IN ('assigned', 'active')) AS claimed_optional + FROM council_agents + WHERE council_id = $1 + ` + + var roleCounts struct { + TotalCore int + ClaimedCore int + TotalOptional int + ClaimedOptional int + } + + err = s.db.Pool.QueryRow(ctx, roleCountsQuery, councilUUID).Scan( + &roleCounts.TotalCore, + &roleCounts.ClaimedCore, + &roleCounts.TotalOptional, + &roleCounts.ClaimedOptional, + ) + if err != nil { + log.Error().Err(err).Msg("Failed to check core role status") + } else if roleCounts.TotalCore > 0 && roleCounts.ClaimedCore == roleCounts.TotalCore { + // All core roles claimed - activate council + _, err = s.db.Pool.Exec(ctx, "UPDATE councils SET status = 'active', updated_at = NOW() WHERE id = $1", councilUUID) + if err == nil { + log.Info(). + Str("council_id", councilID). + Int("core_roles", roleCounts.TotalCore). + Msg("πŸŽ‰ All core roles claimed - Council activated!") + // Stop any ongoing rebroadcast loop now that the council is fully staffed + s.stopCouncilRebroadcast(councilUUID) + + go func() { + broadcastCtx, cancel := context.WithTimeout(context.Background(), 20*time.Second) + defer cancel() + + statusUpdate, buildErr := s.buildCouncilStatusUpdate(broadcastCtx, councilUUID, "active", fmt.Sprintf("All %d core roles claimed", roleCounts.TotalCore)) + if buildErr != nil { + log.Warn().Err(buildErr).Str("council_id", councilID).Msg("Failed to build council status update snapshot") + } else { + if err := s.p2pBroadcaster.BroadcastCouncilStatusUpdate(broadcastCtx, statusUpdate); err != nil { + log.Warn().Err(err).Str("council_id", councilID).Msg("Failed to broadcast council status update") + } + } + + // Trigger team composition for the associated task once council is active + if err := s.triggerTeamCompositionForCouncil(broadcastCtx, existingCouncil.ID); err != nil { + log.Warn().Err(err). + Str("council_id", councilID). + Msg("Failed to trigger team composition after council activation") + } + }() + } + } + + // Return success response + response := map[string]interface{}{ + "status": "accepted", + "council_id": councilID, + "role_name": claim.RoleName, + "ucxl_address": fmt.Sprintf("%s/roles/%s", ucxlCouncilAddress, sanitizeUCXLIdentifier(claim.RoleName)), + "assigned_at": time.Now().Format(time.RFC3339), + "role_profile": roleProfile, + "council_brief": briefSummary, + "persona_status": "pending", + } + + render.Status(r, http.StatusCreated) + render.JSON(w, r, response) +} + +func (s *Server) ensureCouncilAgentStatusConstraint(ctx context.Context) error { + s.constraintMu.Lock() + defer s.constraintMu.Unlock() + + tx, err := s.db.Pool.BeginTx(ctx, pgx.TxOptions{}) + if err != nil { + return fmt.Errorf("begin council agent status constraint update: %w", err) + } + + dropStmt := `ALTER TABLE council_agents DROP CONSTRAINT IF EXISTS council_agents_status_check` + if _, err := tx.Exec(ctx, dropStmt); err != nil { + tx.Rollback(ctx) + return fmt.Errorf("drop council agent status constraint: %w", err) + } + + addStmt := `ALTER TABLE council_agents ADD CONSTRAINT council_agents_status_check CHECK (status IN ('pending', 'deploying', 'assigned', 'active', 'failed', 'removed'))` + if _, err := tx.Exec(ctx, addStmt); err != nil { + tx.Rollback(ctx) + + if pgErr, ok := err.(*pgconn.PgError); ok && pgErr.Code == "42710" { + return nil + } + + return fmt.Errorf("add council agent status constraint: %w", err) + } + + if err := tx.Commit(ctx); err != nil { + return fmt.Errorf("commit council agent status constraint update: %w", err) + } + + return nil +} + +// triggerTeamCompositionForCouncil starts team composition for the task linked to the council +func (s *Server) triggerTeamCompositionForCouncil(ctx context.Context, councilID uuid.UUID) error { + // Look up the task associated with this council. For now we assume the task ID matches the council ID. + // Future work could store an explicit mapping in the database. + taskID := councilID.String() + + log := zerolog.Ctx(ctx).With().Str("council_id", councilID.String()).Str("task_id", taskID).Logger() + log.Info().Msg("πŸ” Triggering team composition for council-linked task") + + // Reuse the monitor's capability to run team composition if available. + // During server initialization the monitor reference is optional; guard against nil. + if s.repoMonitor == nil { + return fmt.Errorf("monitor not initialized; cannot trigger team composition") + } + + // Use the monitor's helper so the same logic runs as for bzzz-task issues. + go s.repoMonitor.TriggerTeamCompositionForCouncil(ctx, taskID) + + return nil +} + +// handleCouncilPersonaAck receives persona readiness status from CHORUS agents. +func (s *Server) handleCouncilPersonaAck(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + councilIDParam := chi.URLParam(r, "councilID") + if councilIDParam == "" { + render.Status(r, http.StatusBadRequest) + render.JSON(w, r, map[string]string{"error": "council_id is required"}) + return + } + + roleName := chi.URLParam(r, "roleName") + if roleName == "" { + render.Status(r, http.StatusBadRequest) + render.JSON(w, r, map[string]string{"error": "roleName is required"}) + return + } + + councilUUID, err := uuid.Parse(councilIDParam) + if err != nil { + render.Status(r, http.StatusBadRequest) + render.JSON(w, r, map[string]string{"error": "invalid council_id format"}) + return + } + + type personaAckPayload struct { + AgentID string `json:"agent_id"` + Status string `json:"status"` + ModelProvider string `json:"model_provider"` + ModelName string `json:"model_name,omitempty"` + SystemPromptHash string `json:"system_prompt_hash,omitempty"` + Capabilities []string `json:"capabilities,omitempty"` + Errors []string `json:"errors,omitempty"` + Metadata map[string]interface{} `json:"metadata,omitempty"` + EndpointOverride string `json:"endpoint_override,omitempty"` + } + + var payload personaAckPayload + if err := json.NewDecoder(r.Body).Decode(&payload); err != nil { + render.Status(r, http.StatusBadRequest) + render.JSON(w, r, map[string]string{"error": "invalid JSON payload"}) + return + } + + status := strings.TrimSpace(strings.ToLower(payload.Status)) + if status == "" { + status = "pending" + } + + ackRecord := map[string]interface{}{ + "agent_id": payload.AgentID, + "status": status, + "model_provider": payload.ModelProvider, + "model_name": payload.ModelName, + "system_prompt_hash": payload.SystemPromptHash, + "capabilities": payload.Capabilities, + "metadata": payload.Metadata, + "errors": payload.Errors, + "endpoint_override": payload.EndpointOverride, + "acknowledged_at": time.Now().UTC().Format(time.RFC3339), + } + + ackPayloadJSON, err := json.Marshal(ackRecord) + if err != nil { + render.Status(r, http.StatusInternalServerError) + render.JSON(w, r, map[string]string{"error": "failed to marshal persona ack payload"}) + return + } + + updateQuery := ` + UPDATE council_agents + SET + persona_status = $1::text, + persona_loaded_at = CASE WHEN $1::text = 'loaded' THEN NOW() ELSE persona_loaded_at END, + persona_ack_payload = COALESCE($2::jsonb, persona_ack_payload), + endpoint_url = COALESCE(NULLIF($3, '')::text, endpoint_url), + updated_at = NOW() + WHERE council_id = $4 AND role_name = $5 + ` + + var ackJSON interface{} + if len(ackPayloadJSON) > 0 { + ackJSON = string(ackPayloadJSON) + } + + commandTag, err := s.db.Pool.Exec(ctx, updateQuery, status, ackJSON, payload.EndpointOverride, councilUUID, roleName) + if err != nil { + log.Error().Err(err). + Str("council_id", councilIDParam). + Str("role_name", roleName). + Msg("Failed to update persona status") + render.Status(r, http.StatusInternalServerError) + render.JSON(w, r, map[string]string{"error": "failed to update persona status"}) + return + } + + if commandTag.RowsAffected() == 0 { + render.Status(r, http.StatusNotFound) + render.JSON(w, r, map[string]string{"error": "council role not found"}) + return + } + + log.Info(). + Str("council_id", councilIDParam). + Str("role_name", roleName). + Str("agent_id", payload.AgentID). + Str("status", status). + Msg("πŸ“© Persona status acknowledged") + + go s.onPersonaAcknowledged(context.Background(), councilUUID) + + render.Status(r, http.StatusAccepted) + render.JSON(w, r, map[string]interface{}{ + "status": status, + "timestamp": time.Now().Unix(), + }) +} + +func (s *Server) onPersonaAcknowledged(ctx context.Context, councilID uuid.UUID) { + if ctx == nil { + ctx = context.Background() + } + + statusUpdate, err := s.buildCouncilStatusUpdate(ctx, councilID, "", "Persona status updated") + if err != nil { + log.Warn().Err(err).Str("council_id", councilID.String()).Msg("Failed to build council status snapshot after persona ack") + } else if statusUpdate != nil && s.p2pBroadcaster != nil { + txCtx, cancel := context.WithTimeout(ctx, 10*time.Second) + defer cancel() + if err := s.p2pBroadcaster.BroadcastCouncilStatusUpdate(txCtx, statusUpdate); err != nil { + log.Warn().Err(err).Str("council_id", councilID.String()).Msg("Failed to broadcast persona status update") + } + } + + ready, err := s.allCorePersonasLoaded(ctx, councilID) + if err != nil { + log.Warn().Err(err).Str("council_id", councilID.String()).Msg("Failed to evaluate core persona readiness") + return + } + + if ready { + if err := s.dispatchCouncilBrief(ctx, councilID); err != nil { + log.Warn().Err(err).Str("council_id", councilID.String()).Msg("Failed to dispatch council design brief") + } + } +} + +func (s *Server) allCorePersonasLoaded(ctx context.Context, councilID uuid.UUID) (bool, error) { + query := ` + SELECT + COUNT(*) FILTER (WHERE required = true) AS total_core, + COUNT(*) FILTER (WHERE required = true AND persona_status = 'loaded') AS loaded_core + FROM council_agents + WHERE council_id = $1 + ` + + var totals struct { + TotalCore int + LoadedCore int + } + + if err := s.db.Pool.QueryRow(ctx, query, councilID).Scan(&totals.TotalCore, &totals.LoadedCore); err != nil { + return false, err + } + + if totals.TotalCore == 0 { + return false, nil + } + + return totals.TotalCore == totals.LoadedCore, nil +} + +func (s *Server) buildCouncilStatusUpdate(ctx context.Context, councilID uuid.UUID, statusOverride, message string) (*p2p.CouncilStatusUpdate, error) { + councilQuery := ` + SELECT project_name, status, brief_dispatched_at + FROM councils + WHERE id = $1 + ` + + var councilRow struct { + ProjectName string + Status string + BriefDispatchedAt sql.NullTime + } + + if err := s.db.Pool.QueryRow(ctx, councilQuery, councilID).Scan(&councilRow.ProjectName, &councilRow.Status, &councilRow.BriefDispatchedAt); err != nil { + return nil, err + } + + rolesQuery := ` + SELECT required, deployed, status, persona_status + FROM council_agents + WHERE council_id = $1 + ` + + rows, err := s.db.Pool.Query(ctx, rolesQuery, councilID) + if err != nil { + return nil, err + } + defer rows.Close() + + var ( + totalCore, claimedCore int + totalOptional, claimedOptional int + totalPersonas, loadedPersonas int + corePersonasLoaded int + ) + + for rows.Next() { + var required, deployed bool + var status, personaStatus string + if err := rows.Scan(&required, &deployed, &status, &personaStatus); err != nil { + return nil, err + } + + if required { + totalCore++ + if deployed && (status == "assigned" || status == "active") { + claimedCore++ + } + } else { + totalOptional++ + if deployed && (status == "assigned" || status == "active") { + claimedOptional++ + } + } + + if deployed { + totalPersonas++ + if personaStatus == "loaded" { + loadedPersonas++ + if required { + corePersonasLoaded++ + } + } + } + } + + if err := rows.Err(); err != nil { + return nil, err + } + + status := councilRow.Status + if statusOverride != "" { + status = statusOverride + } + + if message == "" { + message = fmt.Sprintf("Core roles %d/%d claimed", claimedCore, totalCore) + } + + return &p2p.CouncilStatusUpdate{ + CouncilID: councilID, + ProjectName: councilRow.ProjectName, + Status: status, + Message: message, + Timestamp: time.Now(), + CoreRoles: p2p.RoleCounts{ + Total: totalCore, + Claimed: claimedCore, + }, + Optional: p2p.RoleCounts{ + Total: totalOptional, + Claimed: claimedOptional, + }, + Personas: p2p.PersonaCounts{ + Total: totalPersonas, + Loaded: loadedPersonas, + CoreLoaded: corePersonasLoaded, + }, + BriefDispatched: councilRow.BriefDispatchedAt.Valid, + }, nil +} + +func (s *Server) dispatchCouncilBrief(ctx context.Context, councilID uuid.UUID) error { + const leadRole = "tpm" + + councilQuery := ` + SELECT project_name, project_brief, repository, external_url, issue_id, brief_dispatched_at + FROM councils + WHERE id = $1 + ` + + var councilRow struct { + ProjectName string + ProjectBrief string + Repository string + ExternalURL sql.NullString + IssueID sql.NullInt64 + BriefDispatchedAt sql.NullTime + } + + if err := s.db.Pool.QueryRow(ctx, councilQuery, councilID).Scan( + &councilRow.ProjectName, + &councilRow.ProjectBrief, + &councilRow.Repository, + &councilRow.ExternalURL, + &councilRow.IssueID, + &councilRow.BriefDispatchedAt, + ); err != nil { + return err + } + + if councilRow.BriefDispatchedAt.Valid { + log.Debug().Str("council_id", councilID.String()).Msg("Council brief already dispatched") + return nil + } + + agentQuery := ` + SELECT agent_id, endpoint_url + FROM council_agents + WHERE council_id = $1 AND role_name = $2 AND deployed = true + ` + + var agentRow struct { + AgentID string + Endpoint sql.NullString + } + + if err := s.db.Pool.QueryRow(ctx, agentQuery, councilID, leadRole).Scan(&agentRow.AgentID, &agentRow.Endpoint); err != nil { + return fmt.Errorf("failed to load project lead agent: %w", err) + } + + if !agentRow.Endpoint.Valid || strings.TrimSpace(agentRow.Endpoint.String) == "" { + return fmt.Errorf("project lead endpoint not available") + } + + briefPayload := map[string]interface{}{ + "council_id": councilID.String(), + "project_name": councilRow.ProjectName, + "repository": councilRow.Repository, + "ucxl_address": fmt.Sprintf("ucxl://team:council@project:%s:council/councils/%s", sanitizeUCXLIdentifier(councilRow.ProjectName), councilID.String()), + "hmmm_topic": fmt.Sprintf("council:%s", councilID.String()), + "expected_artifacts": []string{ + "kickoff_manifest", + "seminal_dr", + "scaffold_plan", + "gate_tests", + }, + } + + if trimmed := strings.TrimSpace(councilRow.ProjectBrief); trimmed != "" { + briefPayload["summary"] = trimmed + } + + if councilRow.ExternalURL.Valid { + briefPayload["brief_url"] = councilRow.ExternalURL.String + } + + if councilRow.IssueID.Valid { + briefPayload["issue_id"] = councilRow.IssueID.Int64 + } + + payloadBytes, err := json.Marshal(briefPayload) + if err != nil { + return fmt.Errorf("failed to marshal brief payload: %w", err) + } + + requestURL := fmt.Sprintf("%s/api/v1/councils/%s/roles/%s/brief", strings.TrimRight(agentRow.Endpoint.String, "/"), councilID.String(), leadRole) + log.Info(). + Str("council_id", councilID.String()). + Str("role", leadRole). + Str("endpoint", requestURL). + Msg("πŸ“¦ Dispatching design brief to council project lead") + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, requestURL, bytes.NewBuffer(payloadBytes)) + if err != nil { + return fmt.Errorf("failed to create brief dispatch request: %w", err) + } + req.Header.Set("Content-Type", "application/json") + req.Header.Set("X-WHOOSH-Broadcast", "council-brief") + + client := &http.Client{Timeout: 15 * time.Second} + resp, err := client.Do(req) + if err != nil { + return fmt.Errorf("failed to send brief to project lead: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusAccepted { + body, _ := io.ReadAll(io.LimitReader(resp.Body, 1024)) + return fmt.Errorf("project lead returned non-success status %d: %s", resp.StatusCode, string(body)) + } + + updateQuery := ` + UPDATE councils + SET brief_dispatched_at = NOW(), brief_owner_role = $2, updated_at = NOW() + WHERE id = $1 AND brief_dispatched_at IS NULL + ` + + if _, err := s.db.Pool.Exec(ctx, updateQuery, councilID, leadRole); err != nil { + log.Warn().Err(err). + Str("council_id", councilID.String()). + Msg("Brief delivered but failed to update dispatch timestamp") + } else { + log.Info(). + Str("council_id", councilID.String()). + Str("role", leadRole). + Msg("🎯 Council brief dispatched to project lead") + } + + statusUpdate, err := s.buildCouncilStatusUpdate(ctx, councilID, "active", "Brief dispatched to project lead") + if err == nil && statusUpdate != nil && s.p2pBroadcaster != nil { + statusUpdate.BriefDispatched = true + txCtx, cancel := context.WithTimeout(ctx, 10*time.Second) + defer cancel() + if err := s.p2pBroadcaster.BroadcastCouncilStatusUpdate(txCtx, statusUpdate); err != nil { + log.Warn().Err(err).Str("council_id", councilID.String()).Msg("Failed to broadcast brief dispatch update") + } + } + + return nil +} diff --git a/ui/index.html b/ui/index.html index bea3982..5688d07 100644 --- a/ui/index.html +++ b/ui/index.html @@ -3,277 +3,34 @@ - WHOOSH - Council Formation Engine [External UI] - - - - + WHOOSH UI + -
-
- - - -
- -
-
-
-

Chart System Metrics

-
- Active Councils - 0 -
-
- Deployed Agents - 0 -
-
- Completed Tasks - 0 -
-
- -
-

Refresh Recent Activity

-
-
Empty
-

No recent activity

-
-
- -
-

Status System Status

-
- Database - βœ… Healthy -
-
- GITEA Integration - βœ… Connected -
-
- BACKBEAT - βœ… Active -
-
- -
-
- Tempo - -- -
-
- Volume - -- -
-
- Phase - -- -
-
- -
-
- Live BACKBEAT Pulse -
-
-
-
- - -
-
- -
- -
-
-

Tasks Active Tasks

-
-
-
No tasks
-

No active tasks found

-
-
-
- -
-

Scheduled Tasks

-
-
-
No scheduled tasks
-

No scheduled tasks found

-
-
-
-
-
- - -
-
-

Team Team Management

- -
- -
-
-
No teams
-

No teams configured yet

-
-
-
- - -
-
-

Agents Agent Management

- -
- -
-
-
No agents
-

No agents registered yet

-
-
-
- - -
-

Settings System Configuration

- -
-
-

GITEA Integration

-
- Base URL - https://gitea.chorus.services -
-
- Webhook Path - /webhooks/gitea -
-
- Token Status - Valid Valid -
-
- -
-

Repository Management

- - - -
- -
-

Chart Repository Stats

-
- Total Repositories - -- -
-
- Active Monitoring - -- -
-
- Last Sync - -- -
-
-
-
- - -
-
-

Repository Management

- -
- -
-

Monitored Repositories

-
-

Loading repositories...

-
-
-
-
- - + + - \ No newline at end of file + diff --git a/ui/script.js b/ui/script.js index bd63f5d..67b861f 100644 --- a/ui/script.js +++ b/ui/script.js @@ -1,705 +1,517 @@ -// WHOOSH Dashboard JavaScript +document.addEventListener('DOMContentLoaded', () => { + const mainContent = document.getElementById('main-content'); -// Global state -let pulseChart = null; + // Simple router + const routes = { + '#dashboard': '

Dashboard

', + '#councils': '

Councils

', + '#tasks': '

Tasks

', + '#repositories': '

Repositories

', + '#analysis': '

Analysis

', + }; -// Initialize on DOM load -document.addEventListener('DOMContentLoaded', function() { - initializeTabs(); - loadDashboard(); - initializePulseVisualization(); - - // Setup form submission handler - const repositoryForm = document.getElementById('repository-form'); - if (repositoryForm) { - repositoryForm.addEventListener('submit', handleRepositorySubmit); + function router() { + const hash = window.location.hash || '#dashboard'; + const [route, param] = hash.split('/'); + + if (route === '#councils' && param) { + loadCouncilDetail(param); + } else if (route === '#tasks' && param) { + loadTaskDetail(param); + } else { + mainContent.innerHTML = routes[hash] || '

Page Not Found

'; + loadContent(hash); + } } -}); -// Tab management -function initializeTabs() { - const tabs = document.querySelectorAll('.nav-tab'); - tabs.forEach(tab => { - tab.addEventListener('click', () => showTab(tab.dataset.tab)); - }); -} - -function showTab(tabId) { - // Hide all tab contents - const contents = document.querySelectorAll('.tab-content'); - contents.forEach(content => { - content.classList.remove('active'); - }); - - // Remove active class from all tabs - const tabs = document.querySelectorAll('.nav-tab'); - tabs.forEach(tab => { - tab.classList.remove('active'); - }); - - // Show selected tab content - const selectedContent = document.getElementById(tabId); - if (selectedContent) { - selectedContent.classList.add('active'); + async function loadContent(hash) { + switch (hash) { + case '#dashboard': + loadDashboard(); + break; + case '#councils': + loadCouncils(); + break; + case '#tasks': + loadTasks(); + break; + case '#repositories': + loadRepositories(); + break; + case '#analysis': + loadAnalysis(); + break; + } } - - // Activate selected tab - const selectedTab = document.querySelector(`[data-tab="${tabId}"]`); - if (selectedTab) { - selectedTab.classList.add('active'); + + const loadingSpinner = document.getElementById('loading-spinner'); + let activeSpinners = 0; + + function showSpinner() { + loadingSpinner.classList.remove('hidden'); } - - // Load content for specific tabs - switch(tabId) { - case 'tasks': - loadTasks(); - break; - case 'teams': - loadTeams(); - break; - case 'agents': - loadAgents(); - break; - case 'repositories': - loadRepositories(); - break; + + function hideSpinner() { + loadingSpinner.classList.add('hidden'); } -} -// Dashboard data loading -function loadDashboard() { - loadSystemMetrics(); - loadRecentActivity(); - loadSystemStatus(); -} + function incSpinner() { + activeSpinners += 1; + if (activeSpinners === 1) showSpinner(); + } -function loadSystemMetrics() { - fetch('/api/v1/metrics') - .then(response => response.json()) - .then(data => { - updateMetric('active-councils', data.active_councils || 0); - updateMetric('deployed-agents', data.deployed_agents || 0); - updateMetric('completed-tasks', data.completed_tasks || 0); - }) - .catch(error => { - console.error('Error loading metrics:', error); - }); -} + function decSpinner() { + if (activeSpinners > 0) { + activeSpinners -= 1; + } + if (activeSpinners === 0) hideSpinner(); + } -function loadRecentActivity() { - fetch('/api/v1/activity/recent') - .then(response => response.json()) - .then(data => { - const container = document.getElementById('recent-activity'); - if (data && data.length > 0) { - container.innerHTML = data.map(activity => - `
- ${activity.title} -
${activity.timestamp}
-
` - ).join(''); - } else { - container.innerHTML = ` -
Empty
-

No recent activity

- `; + async function apiFetch(endpoint) { + incSpinner(); + try { + const authHeaders = getAuthHeaders(); + const response = await fetch(`/api${endpoint}`, { headers: authHeaders }); + if (!response.ok) { + throw new Error(`API request failed: ${response.statusText}`); } - }) - .catch(error => { - console.error('Error loading recent activity:', error); - }); -} - -function loadSystemStatus() { - fetch('/api/v1/status') - .then(response => response.json()) - .then(data => { - updateStatus('database', data.database || 'healthy'); - updateStatus('gitea-integration', data.gitea || 'connected'); - updateStatus('backbeat', data.backbeat || 'active'); - }) - .catch(error => { - console.error('Error loading system status:', error); - }); -} - -function updateMetric(id, value) { - const element = document.querySelector(`[data-metric="${id}"], .metric-value`); - if (element) { - element.textContent = value; + return response.json(); + } finally { + decSpinner(); + } } -} -function updateStatus(component, status) { - // Status indicators are currently hardcoded in HTML - console.log(`Status update: ${component} = ${status}`); -} - -// BACKBEAT pulse visualization -function initializePulseVisualization() { - const canvas = document.getElementById('pulse-trace'); - if (!canvas) return; - - const ctx = canvas.getContext('2d'); - canvas.width = canvas.offsetWidth; - canvas.height = 60; - - // Initialize pulse visualization - updatePulseVisualization(); - - // Update every second - setInterval(updatePulseVisualization, 1000); -} - -function updatePulseVisualization() { - fetch('/api/v1/backbeat/status') - .then(response => response.json()) - .then(data => { - updateBeatMetrics(data); - drawPulseTrace(data); - }) - .catch(error => { - // Use mock data for demonstration - const mockData = { - tempo: Math.floor(Math.random() * 40) + 60, - volume: Math.floor(Math.random() * 30) + 70, - phase: ['rising', 'peak', 'falling', 'valley'][Math.floor(Math.random() * 4)], - trace: Array.from({length: 50}, () => Math.random() * 100) + async function fetchText(url, options = {}) { + incSpinner(); + try { + const authHeaders = getAuthHeaders(); + const merged = { + ...options, + headers: { ...(options.headers || {}), ...authHeaders }, }; - updateBeatMetrics(mockData); - drawPulseTrace(mockData); - }); -} - -function updateBeatMetrics(data) { - const tempoEl = document.getElementById('beat-tempo'); - const volumeEl = document.getElementById('beat-volume'); - const phaseEl = document.getElementById('beat-phase'); - - if (tempoEl) tempoEl.textContent = data.tempo + ' BPM'; - if (volumeEl) volumeEl.textContent = data.volume + '%'; - if (phaseEl) phaseEl.textContent = data.phase; -} - -function drawPulseTrace(data) { - const canvas = document.getElementById('pulse-trace'); - if (!canvas) return; - - const ctx = canvas.getContext('2d'); - const width = canvas.width; - const height = canvas.height; - - // Clear canvas - ctx.fillStyle = 'var(--carbon-800)'; - ctx.fillRect(0, 0, width, height); - - if (!data.trace || data.trace.length === 0) return; - - // Draw pulse trace - ctx.strokeStyle = 'var(--ocean-400)'; - ctx.lineWidth = 2; - ctx.beginPath(); - - const stepX = width / (data.trace.length - 1); - - data.trace.forEach((point, index) => { - const x = index * stepX; - const y = height - (point / 100 * height); - - if (index === 0) { - ctx.moveTo(x, y); - } else { - ctx.lineTo(x, y); + const response = await fetch(url, merged); + if (!response.ok) { + throw new Error(`Request failed: ${response.status} ${response.statusText}`); + } + return response.text(); + } finally { + decSpinner(); } - }); - - ctx.stroke(); -} + } -// Task management -function refreshTasks() { - loadTasks(); -} + function getAuthHeaders() { + const token = (localStorage.getItem('whoosh_token') || getCookie('whoosh_token') || '').trim(); + if (!token) return {}; + return { 'Authorization': `Bearer ${token}` }; + } -function loadTasks() { - Promise.all([ - fetch('/api/v1/tasks/active').then(r => r.json()).catch(() => []), - fetch('/api/v1/tasks/scheduled').then(r => r.json()).catch(() => []) - ]).then(([activeTasks, scheduledTasks]) => { - renderTasks('active-tasks', activeTasks); - renderTasks('scheduled-tasks', scheduledTasks); - }); -} + function getCookie(name) { + const value = `; ${document.cookie}`; + const parts = value.split(`; ${name}=`); + if (parts.length === 2) return parts.pop().split(';').shift(); + return ''; + } -function renderTasks(containerId, tasks) { - const container = document.getElementById(containerId); - if (!container) return; - - if (!tasks || tasks.length === 0) { - const isActive = containerId === 'active-tasks'; - const icon = isActive ? 'List_Check.png' : 'Calendar.png'; - const message = isActive ? 'No active tasks found' : 'No scheduled tasks found'; - - container.innerHTML = ` -
-
- No tasks + async function loadDashboard() { + const dashboardContent = document.getElementById('dashboard-content'); + try { + const health = await apiFetch('/admin/health/details'); + const metrics = await fetchText('/metrics'); + + // A real app would parse the metrics properly + dashboardContent.innerHTML = ` +
+
+

System Status

+

Status: ${health.status}

+

Version: ${health.version}

+
+
+

Metrics

+
${metrics.slice(0, 1000)}...
+
-

${message}

+ `; + } catch (error) { + dashboardContent.innerHTML = `

Error loading dashboard: ${error.message}

`; + } + } + + async function loadCouncils() { + const councilsContent = document.getElementById('councils-content'); + try { + const data = await apiFetch('/v1/councils'); + councilsContent.innerHTML = ` +
+ ${data.councils.map(council => ` +
+

${council.project_name}

+

Status: ${council.status}

+
+ +
+
+ `).join('')} +
+ `; + + // Wire delete buttons for projects (councils) + const deleteBtns = document.querySelectorAll('.delete-project-btn'); + deleteBtns.forEach(btn => { + btn.addEventListener('click', async (event) => { + const projectId = event.target.dataset.projectId; + const projectName = event.target.dataset.projectName || projectId; + if (!confirm(`Delete project "${projectName}"? This removes the council record.`)) return; + try { + await fetchText(`/api/v1/projects/${projectId}`, { method: 'DELETE' }); + loadCouncils(); + } catch (error) { + alert(`Error deleting project: ${error.message}`); + } + }); + }); + } catch (error) { + councilsContent.innerHTML = `

Error loading councils: ${error.message}

`; + } + } + + async function loadCouncilDetail(councilId) { + const councilContent = document.getElementById('main-content'); + try { + const council = await apiFetch(`/v1/councils/${councilId}`); + const artifacts = await apiFetch(`/v1/councils/${councilId}/artifacts`); + + // Normalize server composition (core_agents / optional_agents) into a flat list for the UI table + const core = (council.core_agents || []).map(a => ({ + role_name: a.role_name, + required: true, + status: a.status, + agent_id: a.agent_id, + agent_name: a.agent_name, + })); + const optional = (council.optional_agents || []).map(a => ({ + role_name: a.role_name, + required: false, + status: a.status, + agent_id: a.agent_id, + agent_name: a.agent_name, + })); + const agents = [...core, ...optional]; + + const agentRows = agents.map(agent => ` + + ${agent.role_name} + ${agent.required ? 'Core' : 'Optional'} + ${agent.status || 'unknown'} + ${agent.agent_id || 'β€”'} + ${agent.agent_name || 'β€”'} + + `).join(''); + + const artifactItems = artifacts.artifacts && artifacts.artifacts.length + ? artifacts.artifacts.map(artifact => `
  • ${artifact.artifact_name} - ${artifact.status}
  • `).join('') + : '
  • No artifacts recorded yet
  • '; + + councilContent.innerHTML = ` +

    ${council.project_name || ''}

    +
    +
    +

    Council Details

    +

    Status: ${council.status || 'unknown'}

    + ${council.repository ? `

    Repository: ${council.repository}

    ` : ''} + ${council.project_brief ? `

    Project Brief: ${council.project_brief}

    ` : ''} +
    + +
    +
    +
    +

    Role Fulfilment

    +
    + + + + + + + + + + + + ${agentRows || ''} + +
    RoleTypeStatusAgent IDAgent Name
    No agents yet
    +
    +
    +
    +
    +

    Artifacts

    +
      + ${artifactItems} +
    +
    + `; + + // Wire delete in detail view + const delBtn = document.getElementById('delete-project-detail'); + if (delBtn) { + delBtn.addEventListener('click', async (event) => { + const projectId = event.target.dataset.projectId; + const projectName = event.target.dataset.projectName || projectId; + if (!confirm(`Delete project "${projectName}"?`)) return; + try { + await fetchText(`/api/v1/projects/${projectId}`, { method: 'DELETE' }); + window.location.hash = '#councils'; + loadCouncils(); + } catch (error) { + alert(`Error deleting project: ${error.message}`); + } + }); + } + } catch (error) { + councilContent.innerHTML = `

    Error loading council details: ${error.message}

    `; + } + } + + async function loadTasks() { + const tasksContent = document.getElementById('tasks-content'); + try { + const data = await apiFetch('/v1/tasks'); + tasksContent.innerHTML = ` +
    + ${data.tasks.map(task => ` +
    +

    ${task.title}

    +

    Status: ${task.status}

    +

    Priority: ${task.priority}

    +
    + `).join('')} +
    + `; + } catch (error) { + tasksContent.innerHTML = `

    Error loading tasks: ${error.message}

    `; + } + } + + async function loadTaskDetail(taskId) { + const taskContent = document.getElementById('main-content'); + try { + const task = await apiFetch(`/v1/tasks/${taskId}`); + + taskContent.innerHTML = ` +

    ${task.title}

    +
    +

    Task Details

    +
    +
    +

    Status: ${task.status}

    +

    Priority: ${task.priority}

    +
    +
    +

    Help Promises: (Not implemented)

    +

    Retry Budgets: (Not implemented)

    +
    +
    +
    +

    Description:

    +

    ${task.description}

    +
    + `; + } catch (error) { + taskContent.innerHTML = `

    Error loading task details: ${error.message}

    `; + } + } + + async function loadRepositories() { + const repositoriesContent = document.getElementById('repositories-content'); + try { + const data = await apiFetch('/v1/repositories'); + repositoriesContent.innerHTML = ` +
    +

    Add Repository

    +
    + + + +
    +
    +
    + ${data.repositories.map(repo => ` +
    +

    ${repo.full_name}

    +

    Status: ${repo.sync_status}

    + + +
    + `).join('')} +
    + `; + + const addRepoForm = document.getElementById('add-repo-form'); + addRepoForm.addEventListener('submit', async (event) => { + event.preventDefault(); + const repoName = event.target['repo-name'].value; + try { + // Expect input like "owner/repo"; build WHOOSH payload + const parts = (repoName || '').split('/').map(s => s.trim()).filter(Boolean); + if (parts.length !== 2) { + throw new Error('Please enter repository as "owner/repo"'); + } + const [owner, name] = parts; + const url = `https://gitea.chorus.services/${owner}/${name}`; + + const payload = { + name, + owner, + url, + source_type: 'gitea', + monitor_issues: true, + monitor_pull_requests: true, + enable_chorus_integration: true, + default_branch: 'main', + is_private: false, + topics: [], + }; + + await fetchText('/api/v1/repositories', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(payload), + }); + loadRepositories(); // Refresh the list + } catch (error) { + alert(`Error adding repository: ${error.message}`); + } + }); + + const syncRepoBtns = document.querySelectorAll('.sync-repo-btn'); + syncRepoBtns.forEach(btn => { + btn.addEventListener('click', async (event) => { + const repoId = event.target.dataset.repoId; + try { + await fetchText(`/api/v1/repositories/${repoId}/sync`, { method: 'POST' }); + loadRepositories(); // Refresh the list + } catch (error) { + alert(`Error syncing repository: ${error.message}`); + } + }); + }); + + const deleteRepoBtns = document.querySelectorAll('.delete-repo-btn'); + deleteRepoBtns.forEach(btn => { + btn.addEventListener('click', async (event) => { + const repoId = event.target.dataset.repoId; + if (!confirm('Delete this repository from monitoring?')) return; + try { + await fetchText(`/api/v1/repositories/${repoId}`, { method: 'DELETE' }); + loadRepositories(); + } catch (error) { + alert(`Error deleting repository: ${error.message}`); + } + }); + }); + + } catch (error) { + repositoriesContent.innerHTML = `

    Error loading repositories: ${error.message}

    `; + } + } + + function loadAnalysis() { + const analysisContent = document.getElementById('analysis-content'); + analysisContent.innerHTML = ` +
    +

    Project Analysis

    +
    + + + +
    +
    `; - return; - } - - container.innerHTML = tasks.map(task => { - const priorityClass = task.priority ? `priority-${task.priority.toLowerCase()}` : ''; - return ` -
    -
    ${task.title || 'Untitled Task'}
    -
    - Priority: ${task.priority || 'Normal'} - ${task.created_at || ''} -
    -
    - `; - }).join(''); -} -// Team management -function loadTeams() { - fetch('/api/v1/teams') - .then(response => response.json()) - .then(teams => { - renderTeams(teams); - }) - .catch(error => { - console.error('Error loading teams:', error); - renderTeams([]); + const analysisForm = document.getElementById('analysis-form'); + analysisForm.addEventListener('submit', async (event) => { + event.preventDefault(); + const repoUrl = event.target['repo-url'].value; + const resultsContainer = document.getElementById('analysis-results'); + resultsContainer.innerHTML = '

    Analyzing...

    '; + incSpinner(); + + try { + const response = await fetch('/api/projects/analyze', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ repository_url: repoUrl, project_name: 'Analysis' }), + }); + + if (!response.ok) { + throw new Error(`Analysis request failed: ${response.statusText}`); + } + + const result = await response.json(); + resultsContainer.innerHTML = ` +
    +

    Analysis Initiated

    +

    Analysis ID: ${result.analysis_id}

    +

    Status: ${result.status}

    +

    Estimated Completion: ${result.estimated_completion_minutes} minutes

    +

    Tracking URL: ${result.tracking_url}

    +

    Please check the tracking URL for updates.

    +
    + `; + } catch (error) { + resultsContainer.innerHTML = `

    Analysis failed: ${error.message}

    `; + } finally { + decSpinner(); + } }); -} - -function renderTeams(teams) { - const container = document.getElementById('teams-list'); - if (!container) return; - - if (!teams || teams.length === 0) { - container.innerHTML = ` -
    -
    - No teams -
    -

    No teams configured yet

    -
    - `; - return; } - - container.innerHTML = teams.map(team => ` -
    -
    -
    - ${team.name} -
    ${team.description || ''}
    -
    -
    - `).join(''); -} -// Agent management -function loadAgents() { - fetch('/api/v1/agents') - .then(response => response.json()) - .then(agents => { - renderAgents(agents); - }) - .catch(error => { - console.error('Error loading agents:', error); - renderAgents([]); - }); -} + // Safety: ensure spinner can’t get stuck on due to unhandled errors + window.addEventListener('error', () => { activeSpinners = 0; hideSpinner(); }); + window.addEventListener('unhandledrejection', () => { activeSpinners = 0; hideSpinner(); }); -function renderAgents(agents) { - const container = document.getElementById('agents-list'); - if (!container) return; - - if (!agents || agents.length === 0) { - container.innerHTML = ` -
    -
    - No agents -
    -

    No agents registered yet

    -
    - `; - return; - } - - container.innerHTML = agents.map(agent => ` -
    -
    -
    - ${agent.name} -
    -
    - ${agent.description || 'No description available'} -
    -
    - `).join(''); -} + // Auth UI + function initAuthUI() { + const statusEl = document.getElementById('auth-status'); + const inputEl = document.getElementById('auth-token-input'); + const saveBtn = document.getElementById('save-auth-token'); + const clearBtn = document.getElementById('clear-auth-token'); -// Repository management -function showAddRepositoryForm() { - document.getElementById('add-repository-form').style.display = 'block'; -} - -function hideAddRepositoryForm() { - document.getElementById('add-repository-form').style.display = 'none'; - document.getElementById('repository-form').reset(); -} - -function handleRepositorySubmit(e) { - e.preventDefault(); - - const formData = { - name: document.getElementById('repo-name').value.trim(), - owner: document.getElementById('repo-owner').value.trim(), - url: document.getElementById('repo-url').value.trim(), - source_type: document.getElementById('repo-source-type').value, - default_branch: document.getElementById('repo-branch').value.trim() || 'main', - description: document.getElementById('repo-description').value.trim(), - monitor_issues: document.getElementById('repo-monitor-issues').checked, - enable_chorus_integration: document.getElementById('repo-enable-chorus').checked - }; - - if (!formData.name || !formData.owner || !formData.url) { - alert('Please fill in all required fields'); - return; - } - - fetch('/api/v1/repositories', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify(formData) - }) - .then(response => response.json()) - .then(data => { - if (data.error) { - alert('Error adding repository: ' + data.error); - } else { - alert('Repository added successfully!'); - hideAddRepositoryForm(); - loadRepositories(); - updateRepositoryStats(); + function updateStatus() { + const token = (localStorage.getItem('whoosh_token') || getCookie('whoosh_token') || '').trim(); + if (token) { + statusEl.textContent = 'Authed'; + statusEl.classList.add('authed'); + } else { + statusEl.textContent = 'Guest'; + statusEl.classList.remove('authed'); + } } - }) - .catch(error => { - console.error('Error adding repository:', error); - alert('Error adding repository'); - }); -} -function loadRepositories() { - fetch('/api/v1/repositories') - .then(response => response.json()) - .then(repositories => { - renderRepositories(repositories); - updateRepositoryStats(); - }) - .catch(error => { - console.error('Error loading repositories:', error); - renderRepositories([]); + saveBtn.addEventListener('click', () => { + const v = (inputEl.value || '').trim(); + if (!v) { alert('Token is empty'); return; } + localStorage.setItem('whoosh_token', v); + updateStatus(); + inputEl.value = ''; + alert('Token saved. Actions requiring auth should now work.'); }); -} -function updateRepositoryStats() { - fetch('/api/v1/repositories/stats') - .then(response => response.json()) - .then(stats => { - const totalEl = document.getElementById('total-repos'); - const activeEl = document.getElementById('active-repos'); - const lastSyncEl = document.getElementById('last-sync'); - - if (totalEl) totalEl.textContent = stats.total || 0; - if (activeEl) activeEl.textContent = stats.active || 0; - if (lastSyncEl) lastSyncEl.textContent = stats.last_sync || 'Never'; - }) - .catch(error => { - console.error('Error loading repository stats:', error); + clearBtn.addEventListener('click', () => { + localStorage.removeItem('whoosh_token'); + document.cookie = 'whoosh_token=; Max-Age=0; path=/'; + updateStatus(); }); -} -function renderRepositories(repositories) { - const container = document.getElementById('repositories-list'); - if (!container) return; - - if (!repositories || repositories.length === 0) { - container.innerHTML = '

    No repositories found

    '; - return; + updateStatus(); } - - const html = repositories.map(repo => - '
    ' + - '
    ' + - '

    ' + repo.full_name + '

    ' + - '
    ' + - '
    ' + - '' + (repo.status || 'unknown') + '' + - '
    ' + - '
    ' + - - '
    ' + - '
    Language: ' + (repo.language || 'Not detected') + '
    ' + - '
    Default Branch: ' + (repo.default_branch || 'main') + '
    ' + - '
    Source: ' + (repo.source_type || 'git') + '
    ' + - (repo.description ? '
    Description: ' + repo.description + '
    ' : '') + - '
    ' + - - '
    ' + - '
    Issues: ' + (repo.monitor_issues ? 'βœ… Monitored' : '❌ Not monitored') + '
    ' + - '
    Pull Requests: ' + (repo.monitor_pull_requests ? 'βœ… Monitored' : '❌ Not monitored') + '
    ' + - '
    Releases: ' + (repo.monitor_releases ? 'βœ… Monitored' : '❌ Not monitored') + '
    ' + - '
    CHORUS: ' + (repo.enable_chorus_integration ? 'βœ… Enabled' : '❌ Disabled') + '
    ' + - '
    ' + - - '
    ' + - '' + - '' + - '' + - '' + - '
    ' + - '
    ' - ).join(''); - - container.innerHTML = html; -} -function getStatusColor(status) { - switch(status) { - case 'active': return 'var(--eucalyptus-500)'; - case 'pending': return 'var(--coral-700)'; - case 'error': return 'var(--coral-500)'; - case 'disabled': return 'var(--carbon-400)'; - default: return 'var(--carbon-500)'; - } -} - -function syncRepository(repoId) { - fetch('/api/v1/repositories/' + repoId + '/sync', { - method: 'POST' - }) - .then(response => response.json()) - .then(data => { - alert('Repository sync triggered: ' + data.message); - loadRepositories(); // Reload to show updated status - }) - .catch(error => { - console.error('Error syncing repository:', error); - alert('Error syncing repository'); - }); -} - -function ensureLabels(repoId) { - fetch('/api/v1/repositories/' + repoId + '/ensure-labels', { - method: 'POST' - }) - .then(response => response.json()) - .then(data => { - if (data.error) { - alert('Error ensuring labels: ' + data.error); - } else { - alert('Labels ensured successfully for ' + data.owner + '/' + data.name + '\n\nRequired labels created:\nβ€’ bzzz-task\nβ€’ whoosh-monitored\nβ€’ priority-high\nβ€’ priority-medium\nβ€’ priority-low'); - } - }) - .catch(error => { - console.error('Error ensuring labels:', error); - alert('Error ensuring labels'); - }); -} - -function editRepository(repoId) { - // Fetch repository details first - fetch('/api/v1/repositories/' + repoId) - .then(response => response.json()) - .then(repo => { - showEditModal(repo); - }) - .catch(error => { - console.error('Error fetching repository:', error); - alert('Error fetching repository details'); - }); -} - -function showEditModal(repo) { - // Create modal overlay - const overlay = document.createElement('div'); - overlay.style.cssText = 'position: fixed; top: 0; left: 0; width: 100%; height: 100%; ' + - 'background: rgba(0,0,0,0.5); display: flex; align-items: center; ' + - 'justify-content: center; z-index: 1000;'; - - // Create modal content - const modal = document.createElement('div'); - modal.style.cssText = 'background: white; border-radius: 8px; padding: 24px; ' + - 'max-width: 500px; width: 90%; max-height: 80vh; overflow-y: auto;'; - - modal.innerHTML = - '

    Edit Repository

    ' + - '
    ' + - '' + repo.full_name + '' + - '
    ID: ' + repo.id + '
    ' + - '
    ' + - - '
    ' + - '
    ' + - '' + - '' + - '
    ' + - - '
    ' + - '' + - '' + - '
    ' + - - '
    ' + - '' + - '' + - '
    ' + - - '
    ' + - '

    Monitoring Options:

    ' + - '
    ' + - '' + - '
    ' + - '
    ' + - '' + - '
    ' + - '
    ' + - '' + - '
    ' + - '
    ' + - - '
    ' + - '

    CHORUS Integration:

    ' + - '
    ' + - '' + - '
    ' + - '
    ' + - '' + - '
    ' + - '
    ' + - - '
    ' + - '' + - '' + - '
    ' + - '
    '; - - overlay.appendChild(modal); - document.body.appendChild(overlay); - - // Store modal reference globally so we can close it - window.currentEditModal = overlay; - window.currentRepoId = repo.id; - - // Handle form submission - document.getElementById('editRepoForm').addEventListener('submit', function(e) { - e.preventDefault(); - saveRepositoryChanges(); - }); - - // Close modal on overlay click - overlay.addEventListener('click', function(e) { - if (e.target === overlay) { - closeEditModal(); - } - }); -} - -function closeEditModal() { - if (window.currentEditModal) { - document.body.removeChild(window.currentEditModal); - window.currentEditModal = null; - window.currentRepoId = null; - } -} - -function saveRepositoryChanges() { - const formData = { - description: document.getElementById('description').value.trim() || null, - default_branch: document.getElementById('defaultBranch').value.trim() || null, - language: document.getElementById('language').value.trim() || null, - monitor_issues: document.getElementById('monitorIssues').checked, - monitor_pull_requests: document.getElementById('monitorPRs').checked, - monitor_releases: document.getElementById('monitorReleases').checked, - enable_chorus_integration: document.getElementById('enableChorus').checked, - auto_assign_teams: document.getElementById('autoAssignTeams').checked - }; - - fetch('/api/v1/repositories/' + window.currentRepoId, { - method: 'PUT', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify(formData) - }) - .then(response => response.json()) - .then(data => { - alert('Repository updated successfully!'); - closeEditModal(); - loadRepositories(); // Reload the list to show changes - }) - .catch(error => { - console.error('Error updating repository:', error); - alert('Error updating repository'); - }); -} - -function deleteRepository(repoId, fullName) { - if (confirm('Are you sure you want to delete repository "' + fullName + '"? This will stop monitoring and cannot be undone.')) { - fetch('/api/v1/repositories/' + repoId, { - method: 'DELETE' - }) - .then(response => response.json()) - .then(data => { - alert('Repository deleted: ' + data.message); - loadRepositories(); // Reload the list - }) - .catch(error => { - console.error('Error deleting repository:', error); - alert('Error deleting repository'); - }); - } -} \ No newline at end of file + // Initial load + initAuthUI(); + router(); + window.addEventListener('hashchange', router); +}); diff --git a/ui/styles.css b/ui/styles.css index 2d3234c..75e4a63 100644 --- a/ui/styles.css +++ b/ui/styles.css @@ -1,463 +1,249 @@ -/* CHORUS Brand Variables */ -:root { - font-size: 18px; /* CHORUS proportional base */ - /* Carbon Colors (Primary Neutral) */ - --carbon-950: #000000; - --carbon-900: #0a0a0a; - --carbon-800: #1a1a1a; - --carbon-700: #2a2a2a; - --carbon-600: #666666; - --carbon-500: #808080; - --carbon-400: #a0a0a0; - --carbon-300: #c0c0c0; - --carbon-200: #e0e0e0; - --carbon-100: #f0f0f0; - --carbon-50: #f8f8f8; - - /* Mulberry Colors (Brand Accent) */ - --mulberry-950: #0b0213; - --mulberry-900: #1a1426; - --mulberry-800: #2a2639; - --mulberry-700: #3a384c; - --mulberry-600: #4a4a5f; - --mulberry-500: #5a5c72; - --mulberry-400: #7a7e95; - --mulberry-300: #9aa0b8; - --mulberry-200: #bac2db; - --mulberry-100: #dae4fe; - --mulberry-50: #f0f4ff; - - /* Ocean Colors (Primary Action) */ - --ocean-950: #2a3441; - --ocean-900: #3a4654; - --ocean-800: #4a5867; - --ocean-700: #5a6c80; - --ocean-600: #6a7e99; - --ocean-500: #7a90b2; - --ocean-400: #8ba3c4; - --ocean-300: #9bb6d6; - --ocean-200: #abc9e8; - --ocean-100: #bbdcfa; - --ocean-50: #cbefff; - - /* Eucalyptus Colors (Success) */ - --eucalyptus-950: #2a3330; - --eucalyptus-900: #3a4540; - --eucalyptus-800: #4a5750; - --eucalyptus-700: #515d54; - --eucalyptus-600: #5a6964; - --eucalyptus-500: #6a7974; - --eucalyptus-400: #7a8a7f; - --eucalyptus-300: #8a9b8f; - --eucalyptus-200: #9aac9f; - --eucalyptus-100: #aabdaf; - --eucalyptus-50: #bacfbf; - - /* Coral Colors (Error/Warning) */ - --coral-700: #dc2626; - --coral-500: #ef4444; - --coral-300: #fca5a5; -} - -/* Base Styles with CHORUS Branding */ -body { - font-family: 'Inter Tight', -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; - margin: 0; - padding: 0; - background: var(--carbon-950); - color: var(--carbon-100); +/* Basic Styles */ +body { + font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif; + margin: 0; + background-color: #f4f7f6; + color: #333; line-height: 1.6; } -/* CHORUS Dark Mode Header */ -.header { - background: linear-gradient(135deg, var(--carbon-900) 0%, var(--mulberry-900) 100%); - color: white; - padding: 1.33rem 0; /* 24px at 18px base */ - border-bottom: 1px solid var(--mulberry-800); +#app { + display: flex; + flex-direction: column; + min-height: 100vh; +} + +header { + background-color: #2c3e50; /* Darker header for contrast */ + padding: 1rem 2rem; + border-bottom: 1px solid #34495e; display: flex; justify-content: space-between; align-items: center; + color: #ecf0f1; +} + +header h1 { + margin: 0; + font-size: 1.8rem; + color: #ecf0f1; +} + +nav a { + margin: 0 1rem; + text-decoration: none; + color: #bdc3c7; /* Lighter grey for navigation */ + font-weight: 500; + transition: color 0.3s ease; +} + +nav a:hover { + color: #ecf0f1; +} + +#auth-controls { + display: flex; + align-items: center; + gap: 0.5rem; +} + +#auth-status { + font-size: 0.9rem; + padding: 0.25rem 0.5rem; + border-radius: 4px; + background: #7f8c8d; +} + +#auth-status.authed { + background: #2ecc71; +} + +#auth-token-input { + width: 220px; + padding: 0.4rem 0.6rem; + border: 1px solid #95a5a6; + border-radius: 4px; + background: #ecf0f1; + color: #2c3e50; +} + +main { + flex-grow: 1; + padding: 2rem; max-width: 1200px; margin: 0 auto; - padding-left: 1.33rem; - padding-right: 1.33rem; + width: 100%; } -.header-content { - max-width: 1200px; - margin: 0 auto; - padding: 0 1.33rem; - display: flex; - justify-content: space-between; - align-items: center; +/* Reusable Components */ +.card { + background-color: #fff; + border-radius: 8px; + box-shadow: 0 4px 8px rgba(0,0,0,0.05); + padding: 1.5rem; + margin-bottom: 2rem; + animation: card-fade-in 0.5s ease-in-out; + border: 1px solid #e0e0e0; } -.logo { - font-family: 'Exo', sans-serif; - font-size: 1.33rem; /* 24px at 18px base */ - font-weight: 300; - color: white; - display: flex; - align-items: center; - gap: 0.67rem; +@keyframes card-fade-in { + from { + opacity: 0; + transform: translateY(20px); + } + to { + opacity: 1; + transform: translateY(0); + } } -.logo .tagline { - font-size: 0.78rem; - color: var(--mulberry-300); - font-weight: 400; -} - -.logo::before { - content: ""; - font-size: 1.5rem; -} - -.status-info { - display: flex; - align-items: center; - color: var(--eucalyptus-400); - font-size: 0.78rem; -} - -.status-dot { - width: 0.67rem; - height: 0.67rem; - border-radius: 50%; - background: var(--eucalyptus-400); - margin-right: 0.44rem; - display: inline-block; -} - -/* CHORUS Navigation */ -.nav { - max-width: 1200px; - margin: 0 auto; - padding: 0 1.33rem; - display: flex; - border-bottom: 1px solid var(--mulberry-800); - background: var(--carbon-900); -} - -.nav-tab { - padding: 0.83rem 1.39rem; - cursor: pointer; - border-bottom: 3px solid transparent; - font-weight: 500; - transition: all 0.2s; - color: var(--mulberry-300); - background: none; +.button { + background-color: #3498db; /* A vibrant blue */ + color: #fff; + padding: 0.75rem 1.5rem; border: none; - font-family: inherit; + border-radius: 4px; + cursor: pointer; + font-size: 1rem; + transition: background-color 0.3s ease; } -.nav-tab.active { - border-bottom-color: var(--ocean-500); - color: var(--ocean-300); - background: var(--carbon-800); +.button:hover { + background-color: #2980b9; } -.nav-tab:hover { - background: var(--carbon-800); - color: var(--ocean-400); +.button.danger { + background-color: #e74c3c; } -.content { - max-width: 1200px; - margin: 0 auto; - padding: 1.33rem; +.button.danger:hover { + background-color: #c0392b; } -.tab-content { - display: none; +.error { + color: #e74c3c; + font-weight: bold; } -.tab-content.active { - display: block; +/* Grid Layouts */ +.dashboard-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(400px, 1fr)); + grid-gap: 2rem; } -/* CHORUS Card System */ -.dashboard-grid { - display: grid; - grid-template-columns: repeat(auto-fit, minmax(350px, 1fr)); - gap: 1.33rem; - margin-bottom: 2rem; +.grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(300px, 1fr)); + grid-gap: 2rem; } -.card { - background: var(--carbon-900); - border-radius: 0; - padding: 1.33rem; - box-shadow: 0 0.22rem 0.89rem rgba(0,0,0,0.3); - border: 1px solid var(--mulberry-800); +.card.full-width { + grid-column: 1 / -1; } -.card h3 { - margin: 0 0 1rem 0; - color: var(--carbon-100); - font-size: 1rem; - display: flex; - align-items: center; +.table-wrapper { + width: 100%; + overflow-x: auto; +} + +.role-table { + width: 100%; + border-collapse: collapse; + font-size: 0.95rem; +} + +.role-table th, +.role-table td { + padding: 0.75rem 0.5rem; + border-bottom: 1px solid #e0e0e0; + text-align: left; +} + +.role-table th { + background-color: #f2f4f7; font-weight: 600; + color: #2c3e50; } -.card h2 { - margin: 0 0 1rem 0; - color: var(--carbon-100); - font-size: 1.33rem; - display: flex; - align-items: center; - font-weight: 600; -} - -.card-icon { - width: 1.33rem; - height: 1.33rem; - margin-right: 0.67rem; -} - -/* Metrics with CHORUS Colors */ -.metric { - display: flex; - justify-content: space-between; - margin: 0.44rem 0; - padding: 0.44rem 0; -} - -.metric:not(:last-child) { - border-bottom: 1px solid var(--mulberry-900); -} - -.metric-label { - color: var(--mulberry-300); -} - -.metric-value { - font-weight: 600; - color: var(--carbon-100); -} - -/* Task Items with CHORUS Brand Colors */ -.task-item { - background: var(--carbon-800); - border-radius: 0; - padding: 0.89rem; - margin-bottom: 0.67rem; - border-left: 4px solid var(--mulberry-600); -} - -.task-item.priority-high { - border-left-color: var(--coral-500); -} - -.task-item.priority-medium { - border-left-color: var(--ocean-500); -} - -.task-item.priority-low { - border-left-color: var(--eucalyptus-500); -} - -.task-title { - font-weight: 600; - color: var(--carbon-100); - margin-bottom: 0.44rem; -} - -.task-meta { - display: flex; - justify-content: space-between; - color: var(--mulberry-300); - font-size: 0.78rem; -} - -/* Agent Cards */ -.agent-card { - background: var(--carbon-800); - border-radius: 0; - padding: 0.89rem; - margin-bottom: 0.67rem; -} - -.agent-status { - width: 0.44rem; - height: 0.44rem; - border-radius: 50%; - margin-right: 0.44rem; - display: inline-block; -} - -.agent-status.online { - background: var(--eucalyptus-400); -} - -.agent-status.offline { - background: var(--carbon-500); -} - -.team-member { - display: flex; - align-items: center; - padding: 0.44rem; - background: var(--carbon-900); - border-radius: 0; - margin-bottom: 0.44rem; -} - -/* CHORUS Button System */ -.btn { - padding: 0.44rem 0.89rem; - border-radius: 0.375rem; - border: none; - font-weight: 500; - cursor: pointer; - transition: all 0.2s; - font-family: 'Inter Tight', sans-serif; -} - -.btn-primary { - background: var(--ocean-600); - color: white; -} - -.btn-primary:hover { - background: var(--ocean-500); -} - -.btn-secondary { - background: var(--mulberry-700); - color: var(--mulberry-200); -} - -.btn-secondary:hover { - background: var(--mulberry-600); -} - -/* Empty States */ -.empty-state { - text-align: center; - padding: 2.22rem 1.33rem; - color: var(--mulberry-300); -} - -.empty-state-icon { - font-size: 2.67rem; - margin-bottom: 0.89rem; - text-align: center; -} - -/* BackBeat Pulse Visualization */ -#pulse-trace { - background: var(--carbon-800); - border-radius: 0; - border: 1px solid var(--mulberry-800); -} - -/* Additional CHORUS Styling */ -.backbeat-label { - color: var(--mulberry-300); - font-size: 0.67rem; - text-align: center; - margin-top: 0.44rem; -} - -/* Modal and Overlay Styling */ -.modal-overlay { - background: rgba(0, 0, 0, 0.8) !important; -} - -.modal-content { - background: var(--carbon-900) !important; - color: var(--carbon-100) !important; - border: 1px solid var(--mulberry-800) !important; -} - -.modal-content input, .modal-content select, .modal-content textarea { - background: var(--carbon-800); - color: var(--carbon-100); - border: 1px solid var(--mulberry-700); - border-radius: 0; - padding: 0.44rem 0.67rem; - font-family: inherit; -} - -.modal-content input:focus, .modal-content select:focus, .modal-content textarea:focus { - border-color: var(--ocean-500); - outline: none; -} - -.modal-content label { - color: var(--mulberry-200); - display: block; - margin-bottom: 0.33rem; - font-weight: 500; -} - -/* Repository Cards */ -.repository-item { - background: var(--carbon-800); - border-radius: 0; - padding: 0.89rem; - margin-bottom: 0.67rem; - border: 1px solid var(--mulberry-800); -} - -.repository-item h4 { - color: var(--carbon-100); - margin: 0 0 0.44rem 0; -} - -.repository-meta { - color: var(--mulberry-300); - font-size: 0.78rem; - margin-bottom: 0.44rem; -} - -/* Success/Error States */ -.success-indicator { - color: var(--eucalyptus-400); -} - -.error-indicator { - color: var(--coral-500); -} - -.warning-indicator { - color: var(--ocean-400); -} - -/* Tabs styling */ -.tabs { - margin-bottom: 1.33rem; -} - -.tabs h4 { - color: var(--carbon-100); - margin-bottom: 0.67rem; - font-size: 0.89rem; - font-weight: 600; -} - -/* Form styling improvements */ -form { - display: flex; - flex-direction: column; - gap: 1rem; -} - -form > div { - display: flex; - flex-direction: column; - gap: 0.33rem; +.role-table tr:hover td { + background-color: #f8f9fb; } +/* Forms */ form label { - font-weight: 500; - color: var(--mulberry-200); + display: block; + margin-bottom: 0.5rem; + font-weight: 600; } -form input[type="checkbox"] { - margin-right: 0.5rem; - accent-color: var(--ocean-500); -} \ No newline at end of file +form input[type="text"] { + width: 100%; + padding: 0.8rem; + margin-bottom: 1rem; + border: 1px solid #ccc; + border-radius: 4px; + box-sizing: border-box; +} + +/* Loading Spinner */ +#loading-spinner { + position: fixed; + top: 0; + left: 0; + width: 100%; + height: 100%; + background-color: rgba(255, 255, 255, 0.8); + display: flex; + justify-content: center; + align-items: center; + z-index: 9999; +} + +.spinner { + border: 8px solid #f3f3f3; + border-top: 8px solid #3498db; + border-radius: 50%; + width: 60px; + height: 60px; + animation: spin 2s linear infinite; +} + +@keyframes spin { + 0% { transform: rotate(0deg); } + 100% { transform: rotate(360deg); } +} + +.hidden { + display: none; +} + +/* Responsive Design */ +@media (max-width: 768px) { + header { + flex-direction: column; + padding: 1rem; + } + + nav { + margin-top: 1rem; + } + + nav a { + margin: 0 0.5rem; + } + + main { + padding: 1rem; + } + + .dashboard-grid, + .grid { + grid-template-columns: 1fr; + grid-gap: 1rem; + } + + .card { + margin-bottom: 1rem; + } +}