feat: Replace capability broadcasting with availability broadcasting
- Add availability broadcasting every 30s showing real working status - Replace constant capability broadcasts with change-based system - Implement persistent capability storage in ~/.config/bzzz/ - Add SimpleTaskTracker for real task status monitoring - Only broadcast capabilities on startup or when models/capabilities change - Add proper Hive API URL configuration and integration - Fix capability change detection with proper comparison logic This eliminates P2P mesh spam and provides accurate node availability. 🤖 Generated with [Claude Code](https://claude.ai/code) Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
253
pkg/config/config.go
Normal file
253
pkg/config/config.go
Normal file
@@ -0,0 +1,253 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
// Config represents the complete configuration for a Bzzz agent.
// It is assembled in three layers (see LoadConfig): built-in defaults,
// an optional YAML file, then environment-variable overrides.
type Config struct {
	HiveAPI HiveAPIConfig `yaml:"hive_api"`
	Agent   AgentConfig   `yaml:"agent"`
	GitHub  GitHubConfig  `yaml:"github"`
	P2P     P2PConfig     `yaml:"p2p"`
	Logging LoggingConfig `yaml:"logging"`
}

// HiveAPIConfig holds Hive system integration settings.
type HiveAPIConfig struct {
	BaseURL    string        `yaml:"base_url"`    // root URL of the Hive API
	APIKey     string        `yaml:"api_key"`     // optional bearer token; empty disables auth
	Timeout    time.Duration `yaml:"timeout"`     // per-request timeout
	RetryCount int           `yaml:"retry_count"` // number of retries on failure
}

// AgentConfig holds agent-specific configuration.
type AgentConfig struct {
	ID string `yaml:"id"` // may be empty; derived from the node ID in main.go
	// Capabilities advertised to the mesh for task matching.
	Capabilities []string      `yaml:"capabilities"`
	PollInterval time.Duration `yaml:"poll_interval"` // how often to poll for new tasks
	MaxTasks     int           `yaml:"max_tasks"`     // concurrent task limit
	// Models lists the Ollama models available on this node.
	Models         []string `yaml:"models"`
	Specialization string   `yaml:"specialization"` // preset name, e.g. "general_developer"
}

// GitHubConfig holds GitHub integration settings.
type GitHubConfig struct {
	TokenFile string        `yaml:"token_file"` // path to a file containing the API token
	UserAgent string        `yaml:"user_agent"` // User-Agent header sent to GitHub
	Timeout   time.Duration `yaml:"timeout"`
	RateLimit bool          `yaml:"rate_limit"` // whether to honor GitHub rate limiting
}

// P2PConfig holds P2P networking configuration.
type P2PConfig struct {
	ServiceTag       string        `yaml:"service_tag"`    // mDNS/service discovery tag
	BzzzTopic        string        `yaml:"bzzz_topic"`     // pubsub topic for coordination
	AntennaeTopic    string        `yaml:"antennae_topic"` // pubsub topic for meta-discussion
	DiscoveryTimeout time.Duration `yaml:"discovery_timeout"`

	// Human escalation settings
	EscalationWebhook  string   `yaml:"escalation_webhook"`  // webhook notified when agents get stuck
	EscalationKeywords []string `yaml:"escalation_keywords"` // phrases that trigger escalation
	ConversationLimit  int      `yaml:"conversation_limit"`  // max exchanges before escalating
}

// LoggingConfig holds logging configuration.
type LoggingConfig struct {
	Level      string `yaml:"level"`  // e.g. "debug", "info", "warn"
	Format     string `yaml:"format"` // e.g. "text"
	Output     string `yaml:"output"` // e.g. "stdout"
	Structured bool   `yaml:"structured"`
}
|
||||
|
||||
// LoadConfig loads configuration from file, environment variables, and defaults
|
||||
func LoadConfig(configPath string) (*Config, error) {
|
||||
// Start with defaults
|
||||
config := getDefaultConfig()
|
||||
|
||||
// Load from file if it exists
|
||||
if configPath != "" && fileExists(configPath) {
|
||||
if err := loadFromFile(config, configPath); err != nil {
|
||||
return nil, fmt.Errorf("failed to load config file: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
// Override with environment variables
|
||||
if err := loadFromEnv(config); err != nil {
|
||||
return nil, fmt.Errorf("failed to load environment variables: %w", err)
|
||||
}
|
||||
|
||||
// Validate configuration
|
||||
if err := validateConfig(config); err != nil {
|
||||
return nil, fmt.Errorf("invalid configuration: %w", err)
|
||||
}
|
||||
|
||||
return config, nil
|
||||
}
|
||||
|
||||
// getDefaultConfig returns the default configuration
|
||||
func getDefaultConfig() *Config {
|
||||
return &Config{
|
||||
HiveAPI: HiveAPIConfig{
|
||||
BaseURL: "https://hive.home.deepblack.cloud",
|
||||
Timeout: 30 * time.Second,
|
||||
RetryCount: 3,
|
||||
},
|
||||
Agent: AgentConfig{
|
||||
Capabilities: []string{"general", "reasoning", "task-coordination"},
|
||||
PollInterval: 30 * time.Second,
|
||||
MaxTasks: 3,
|
||||
Models: []string{"phi3", "llama3.1"},
|
||||
Specialization: "general_developer",
|
||||
},
|
||||
GitHub: GitHubConfig{
|
||||
TokenFile: "/home/tony/AI/secrets/passwords_and_tokens/gh-token",
|
||||
UserAgent: "Bzzz-P2P-Agent/1.0",
|
||||
Timeout: 30 * time.Second,
|
||||
RateLimit: true,
|
||||
},
|
||||
P2P: P2PConfig{
|
||||
ServiceTag: "bzzz-peer-discovery",
|
||||
BzzzTopic: "bzzz/coordination/v1",
|
||||
AntennaeTopic: "antennae/meta-discussion/v1",
|
||||
DiscoveryTimeout: 10 * time.Second,
|
||||
EscalationWebhook: "https://n8n.home.deepblack.cloud/webhook-test/human-escalation",
|
||||
EscalationKeywords: []string{"stuck", "help", "human", "escalate", "clarification needed", "manual intervention"},
|
||||
ConversationLimit: 10,
|
||||
},
|
||||
Logging: LoggingConfig{
|
||||
Level: "info",
|
||||
Format: "text",
|
||||
Output: "stdout",
|
||||
Structured: false,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// loadFromFile loads configuration from a YAML file
|
||||
func loadFromFile(config *Config, filePath string) error {
|
||||
data, err := ioutil.ReadFile(filePath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to read config file: %w", err)
|
||||
}
|
||||
|
||||
if err := yaml.Unmarshal(data, config); err != nil {
|
||||
return fmt.Errorf("failed to parse YAML config: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// loadFromEnv loads configuration from environment variables
|
||||
func loadFromEnv(config *Config) error {
|
||||
// Hive API configuration
|
||||
if url := os.Getenv("BZZZ_HIVE_API_URL"); url != "" {
|
||||
config.HiveAPI.BaseURL = url
|
||||
}
|
||||
if apiKey := os.Getenv("BZZZ_HIVE_API_KEY"); apiKey != "" {
|
||||
config.HiveAPI.APIKey = apiKey
|
||||
}
|
||||
|
||||
// Agent configuration
|
||||
if agentID := os.Getenv("BZZZ_AGENT_ID"); agentID != "" {
|
||||
config.Agent.ID = agentID
|
||||
}
|
||||
if capabilities := os.Getenv("BZZZ_AGENT_CAPABILITIES"); capabilities != "" {
|
||||
config.Agent.Capabilities = strings.Split(capabilities, ",")
|
||||
}
|
||||
if specialization := os.Getenv("BZZZ_AGENT_SPECIALIZATION"); specialization != "" {
|
||||
config.Agent.Specialization = specialization
|
||||
}
|
||||
|
||||
// GitHub configuration
|
||||
if tokenFile := os.Getenv("BZZZ_GITHUB_TOKEN_FILE"); tokenFile != "" {
|
||||
config.GitHub.TokenFile = tokenFile
|
||||
}
|
||||
|
||||
// P2P configuration
|
||||
if webhook := os.Getenv("BZZZ_ESCALATION_WEBHOOK"); webhook != "" {
|
||||
config.P2P.EscalationWebhook = webhook
|
||||
}
|
||||
|
||||
// Logging configuration
|
||||
if level := os.Getenv("BZZZ_LOG_LEVEL"); level != "" {
|
||||
config.Logging.Level = level
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// validateConfig validates the configuration values
|
||||
func validateConfig(config *Config) error {
|
||||
// Validate required fields
|
||||
if config.HiveAPI.BaseURL == "" {
|
||||
return fmt.Errorf("hive_api.base_url is required")
|
||||
}
|
||||
|
||||
// Note: Agent.ID can be empty - it will be auto-generated from node ID in main.go
|
||||
|
||||
if len(config.Agent.Capabilities) == 0 {
|
||||
return fmt.Errorf("agent.capabilities cannot be empty")
|
||||
}
|
||||
|
||||
if config.Agent.PollInterval <= 0 {
|
||||
return fmt.Errorf("agent.poll_interval must be positive")
|
||||
}
|
||||
|
||||
if config.Agent.MaxTasks <= 0 {
|
||||
return fmt.Errorf("agent.max_tasks must be positive")
|
||||
}
|
||||
|
||||
// Validate GitHub token file exists if specified
|
||||
if config.GitHub.TokenFile != "" && !fileExists(config.GitHub.TokenFile) {
|
||||
return fmt.Errorf("github token file does not exist: %s", config.GitHub.TokenFile)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// SaveConfig saves the configuration to a YAML file
|
||||
func SaveConfig(config *Config, filePath string) error {
|
||||
data, err := yaml.Marshal(config)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal config to YAML: %w", err)
|
||||
}
|
||||
|
||||
if err := ioutil.WriteFile(filePath, data, 0644); err != nil {
|
||||
return fmt.Errorf("failed to write config file: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetGitHubToken reads the GitHub token from the configured file
|
||||
func (c *Config) GetGitHubToken() (string, error) {
|
||||
if c.GitHub.TokenFile == "" {
|
||||
return "", fmt.Errorf("no GitHub token file configured")
|
||||
}
|
||||
|
||||
tokenBytes, err := ioutil.ReadFile(c.GitHub.TokenFile)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to read GitHub token: %w", err)
|
||||
}
|
||||
|
||||
return strings.TrimSpace(string(tokenBytes)), nil
|
||||
}
|
||||
|
||||
// fileExists checks if a file exists
|
||||
func fileExists(filePath string) bool {
|
||||
_, err := os.Stat(filePath)
|
||||
return err == nil
|
||||
}
|
||||
|
||||
// GenerateDefaultConfigFile creates a default configuration file
|
||||
func GenerateDefaultConfigFile(filePath string) error {
|
||||
config := getDefaultConfig()
|
||||
return SaveConfig(config, filePath)
|
||||
}
|
||||
188
pkg/config/defaults.go
Normal file
188
pkg/config/defaults.go
Normal file
@@ -0,0 +1,188 @@
|
||||
package config
|
||||
|
||||
import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
	"time"
)
|
||||
|
||||
// DefaultConfigPaths returns the locations searched for a config file, in
// priority order: the working directory, ./config, the user's
// ~/.config/bzzz directory, and finally the system-wide /etc path.
func DefaultConfigPaths() []string {
	paths := []string{
		"./bzzz.yaml",
		"./config/bzzz.yaml",
	}

	// Only include the per-user path when the home directory can be
	// resolved. Previously the error was ignored, so a lookup failure
	// produced the bogus relative path ".config/bzzz/config.yaml".
	if homeDir, err := os.UserHomeDir(); err == nil {
		paths = append(paths, filepath.Join(homeDir, ".config", "bzzz", "config.yaml"))
	}

	return append(paths, "/etc/bzzz/config.yaml")
}
|
||||
|
||||
// GetNodeSpecificDefaults returns configuration defaults based on the node
|
||||
func GetNodeSpecificDefaults(nodeID string) *Config {
|
||||
config := getDefaultConfig()
|
||||
|
||||
// Set node-specific agent ID
|
||||
config.Agent.ID = nodeID
|
||||
|
||||
// Set node-specific capabilities and models based on known cluster setup
|
||||
switch {
|
||||
case nodeID == "walnut" || containsString(nodeID, "walnut"):
|
||||
config.Agent.Capabilities = []string{"task-coordination", "meta-discussion", "ollama-reasoning", "code-generation"}
|
||||
config.Agent.Models = []string{"starcoder2:15b", "deepseek-coder-v2", "qwen3:14b", "phi3"}
|
||||
config.Agent.Specialization = "code_generation"
|
||||
|
||||
case nodeID == "ironwood" || containsString(nodeID, "ironwood"):
|
||||
config.Agent.Capabilities = []string{"task-coordination", "meta-discussion", "ollama-reasoning", "advanced-reasoning"}
|
||||
config.Agent.Models = []string{"phi4:14b", "phi4-reasoning:14b", "gemma3:12b", "devstral"}
|
||||
config.Agent.Specialization = "advanced_reasoning"
|
||||
|
||||
case nodeID == "acacia" || containsString(nodeID, "acacia"):
|
||||
config.Agent.Capabilities = []string{"task-coordination", "meta-discussion", "ollama-reasoning", "code-analysis"}
|
||||
config.Agent.Models = []string{"qwen2.5-coder", "deepseek-r1", "codellama", "llava"}
|
||||
config.Agent.Specialization = "code_analysis"
|
||||
|
||||
default:
|
||||
// Generic defaults for unknown nodes
|
||||
config.Agent.Capabilities = []string{"task-coordination", "meta-discussion", "general"}
|
||||
config.Agent.Models = []string{"phi3", "llama3.1"}
|
||||
config.Agent.Specialization = "general_developer"
|
||||
}
|
||||
|
||||
return config
|
||||
}
|
||||
|
||||
// GetEnvironmentSpecificDefaults returns defaults based on environment
|
||||
func GetEnvironmentSpecificDefaults(environment string) *Config {
|
||||
config := getDefaultConfig()
|
||||
|
||||
switch environment {
|
||||
case "development", "dev":
|
||||
config.HiveAPI.BaseURL = "http://localhost:8000"
|
||||
config.P2P.EscalationWebhook = "http://localhost:5678/webhook-test/human-escalation"
|
||||
config.Logging.Level = "debug"
|
||||
config.Agent.PollInterval = 10 * time.Second
|
||||
|
||||
case "staging":
|
||||
config.HiveAPI.BaseURL = "https://hive-staging.home.deepblack.cloud"
|
||||
config.P2P.EscalationWebhook = "https://n8n-staging.home.deepblack.cloud/webhook-test/human-escalation"
|
||||
config.Logging.Level = "info"
|
||||
config.Agent.PollInterval = 20 * time.Second
|
||||
|
||||
case "production", "prod":
|
||||
config.HiveAPI.BaseURL = "https://hive.home.deepblack.cloud"
|
||||
config.P2P.EscalationWebhook = "https://n8n.home.deepblack.cloud/webhook-test/human-escalation"
|
||||
config.Logging.Level = "warn"
|
||||
config.Agent.PollInterval = 30 * time.Second
|
||||
|
||||
default:
|
||||
// Default to production-like settings
|
||||
config.Logging.Level = "info"
|
||||
}
|
||||
|
||||
return config
|
||||
}
|
||||
|
||||
// GetCapabilityPresets returns the predefined capability sets, keyed by
// specialization name. Every preset starts with the two core coordination
// capabilities; the remaining entries distinguish the role.
func GetCapabilityPresets() map[string][]string {
	core := []string{"task-coordination", "meta-discussion"}

	// withCore builds a fresh slice so presets never share backing arrays.
	withCore := func(extras ...string) []string {
		set := make([]string, 0, len(core)+len(extras))
		set = append(set, core...)
		return append(set, extras...)
	}

	return map[string][]string{
		"senior_developer":    withCore("ollama-reasoning", "code-generation", "code-review", "architecture"),
		"code_reviewer":       withCore("ollama-reasoning", "code-review", "security-analysis", "best-practices"),
		"debugger_specialist": withCore("ollama-reasoning", "debugging", "error-analysis", "troubleshooting"),
		"devops_engineer":     withCore("deployment", "infrastructure", "monitoring", "automation"),
		"test_engineer":       withCore("testing", "quality-assurance", "test-automation", "validation"),
		"general_developer":   withCore("ollama-reasoning", "general"),
	}
}
|
||||
|
||||
// ApplyCapabilityPreset applies a predefined capability preset to the config
|
||||
func (c *Config) ApplyCapabilityPreset(presetName string) error {
|
||||
presets := GetCapabilityPresets()
|
||||
|
||||
capabilities, exists := presets[presetName]
|
||||
if !exists {
|
||||
return fmt.Errorf("unknown capability preset: %s", presetName)
|
||||
}
|
||||
|
||||
c.Agent.Capabilities = capabilities
|
||||
c.Agent.Specialization = presetName
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetModelPresets returns the predefined Ollama model sets for each
// specialization, keyed by specialization name.
func GetModelPresets() map[string][]string {
	presets := make(map[string][]string, 5)
	presets["code_generation"] = []string{"starcoder2:15b", "deepseek-coder-v2", "codellama"}
	presets["advanced_reasoning"] = []string{"phi4:14b", "phi4-reasoning:14b", "deepseek-r1"}
	presets["code_analysis"] = []string{"qwen2.5-coder", "deepseek-coder-v2", "codellama"}
	presets["general_purpose"] = []string{"phi3", "llama3.1:8b", "qwen3"}
	presets["vision_tasks"] = []string{"llava", "llava:13b"}
	return presets
}
|
||||
|
||||
// containsString checks if a string contains a substring (case-insensitive)
|
||||
func containsString(s, substr string) bool {
|
||||
return len(s) >= len(substr) &&
|
||||
(s[:len(substr)] == substr || s[len(s)-len(substr):] == substr)
|
||||
}
|
||||
229
pkg/hive/client.go
Normal file
229
pkg/hive/client.go
Normal file
@@ -0,0 +1,229 @@
|
||||
package hive
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
// HiveClient provides integration with the Hive task coordination system
// over its HTTP API.
type HiveClient struct {
	BaseURL    string       // root URL of the Hive API, without a trailing slash
	APIKey     string       // optional bearer token; empty disables the Authorization header
	HTTPClient *http.Client // shared client; NewHiveClient sets a 30s timeout
}
|
||||
|
||||
// NewHiveClient creates a new Hive API client
|
||||
func NewHiveClient(baseURL, apiKey string) *HiveClient {
|
||||
return &HiveClient{
|
||||
BaseURL: baseURL,
|
||||
APIKey: apiKey,
|
||||
HTTPClient: &http.Client{
|
||||
Timeout: 30 * time.Second,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// Repository represents a Git repository configuration from Hive.
type Repository struct {
	ProjectID    int    `json:"project_id"`
	Name         string `json:"name"`
	GitURL       string `json:"git_url"`
	Owner        string `json:"owner"`
	Repository   string `json:"repository"`
	Branch       string `json:"branch"`
	BzzzEnabled  bool   `json:"bzzz_enabled"`   // repository is opted in to Bzzz
	ReadyToClaim bool   `json:"ready_to_claim"` // tasks may be claimed right now
	PrivateRepo  bool   `json:"private_repo"`
	// GitHubTokenRequired indicates a GitHub token is needed to access the repo.
	GitHubTokenRequired bool `json:"github_token_required"`
}

// ActiveRepositoriesResponse represents the response from /api/bzzz/active-repos.
type ActiveRepositoriesResponse struct {
	Repositories []Repository `json:"repositories"`
}

// TaskClaimRequest represents a task claim request sent to Hive.
type TaskClaimRequest struct {
	TaskID    int    `json:"task_id"`
	AgentID   string `json:"agent_id"`
	ClaimedAt int64  `json:"claimed_at"` // Unix seconds
}

// TaskStatusUpdate represents a task status update sent to Hive.
type TaskStatusUpdate struct {
	Status    string                 `json:"status"`
	UpdatedAt int64                  `json:"updated_at"` // Unix seconds
	Results   map[string]interface{} `json:"results,omitempty"`
}
|
||||
|
||||
// GetActiveRepositories fetches all repositories marked for Bzzz consumption
|
||||
func (c *HiveClient) GetActiveRepositories(ctx context.Context) ([]Repository, error) {
|
||||
url := fmt.Sprintf("%s/api/bzzz/active-repos", c.BaseURL)
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create request: %w", err)
|
||||
}
|
||||
|
||||
// Add authentication if API key is provided
|
||||
if c.APIKey != "" {
|
||||
req.Header.Set("Authorization", "Bearer "+c.APIKey)
|
||||
}
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
|
||||
resp, err := c.HTTPClient.Do(req)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to execute request: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
body, _ := io.ReadAll(resp.Body)
|
||||
return nil, fmt.Errorf("API request failed with status %d: %s", resp.StatusCode, string(body))
|
||||
}
|
||||
|
||||
var response ActiveRepositoriesResponse
|
||||
if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
|
||||
return nil, fmt.Errorf("failed to decode response: %w", err)
|
||||
}
|
||||
|
||||
return response.Repositories, nil
|
||||
}
|
||||
|
||||
// GetProjectTasks fetches bzzz-task labeled issues for a specific project
|
||||
func (c *HiveClient) GetProjectTasks(ctx context.Context, projectID int) ([]map[string]interface{}, error) {
|
||||
url := fmt.Sprintf("%s/api/bzzz/projects/%d/tasks", c.BaseURL, projectID)
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create request: %w", err)
|
||||
}
|
||||
|
||||
if c.APIKey != "" {
|
||||
req.Header.Set("Authorization", "Bearer "+c.APIKey)
|
||||
}
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
|
||||
resp, err := c.HTTPClient.Do(req)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to execute request: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
body, _ := io.ReadAll(resp.Body)
|
||||
return nil, fmt.Errorf("API request failed with status %d: %s", resp.StatusCode, string(body))
|
||||
}
|
||||
|
||||
var tasks []map[string]interface{}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&tasks); err != nil {
|
||||
return nil, fmt.Errorf("failed to decode response: %w", err)
|
||||
}
|
||||
|
||||
return tasks, nil
|
||||
}
|
||||
|
||||
// ClaimTask registers a task claim with the Hive system
|
||||
func (c *HiveClient) ClaimTask(ctx context.Context, projectID, taskID int, agentID string) error {
|
||||
url := fmt.Sprintf("%s/api/bzzz/projects/%d/claim", c.BaseURL, projectID)
|
||||
|
||||
claimRequest := TaskClaimRequest{
|
||||
TaskID: taskID,
|
||||
AgentID: agentID,
|
||||
ClaimedAt: time.Now().Unix(),
|
||||
}
|
||||
|
||||
jsonData, err := json.Marshal(claimRequest)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal claim request: %w", err)
|
||||
}
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, "POST", url, bytes.NewBuffer(jsonData))
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create request: %w", err)
|
||||
}
|
||||
|
||||
if c.APIKey != "" {
|
||||
req.Header.Set("Authorization", "Bearer "+c.APIKey)
|
||||
}
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
|
||||
resp, err := c.HTTPClient.Do(req)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to execute request: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusCreated {
|
||||
body, _ := io.ReadAll(resp.Body)
|
||||
return fmt.Errorf("claim request failed with status %d: %s", resp.StatusCode, string(body))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// UpdateTaskStatus updates the task status in the Hive system.
//
// NOTE(review): the taskID parameter is currently unused — the URL only
// carries projectID and the TaskStatusUpdate payload has no task field —
// so the server cannot tell which task this update refers to. Confirm
// whether the endpoint should be .../projects/%d/tasks/%d/status or the
// payload should include a task_id.
func (c *HiveClient) UpdateTaskStatus(ctx context.Context, projectID, taskID int, status string, results map[string]interface{}) error {
	url := fmt.Sprintf("%s/api/bzzz/projects/%d/status", c.BaseURL, projectID)

	statusUpdate := TaskStatusUpdate{
		Status:    status,
		UpdatedAt: time.Now().Unix(),
		Results:   results,
	}

	jsonData, err := json.Marshal(statusUpdate)
	if err != nil {
		return fmt.Errorf("failed to marshal status update: %w", err)
	}

	req, err := http.NewRequestWithContext(ctx, "PUT", url, bytes.NewBuffer(jsonData))
	if err != nil {
		return fmt.Errorf("failed to create request: %w", err)
	}

	// Authenticate only when an API key was configured.
	if c.APIKey != "" {
		req.Header.Set("Authorization", "Bearer "+c.APIKey)
	}
	req.Header.Set("Content-Type", "application/json")

	resp, err := c.HTTPClient.Do(req)
	if err != nil {
		return fmt.Errorf("failed to execute request: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		body, _ := io.ReadAll(resp.Body)
		return fmt.Errorf("status update failed with status %d: %s", resp.StatusCode, string(body))
	}

	return nil
}
|
||||
|
||||
// HealthCheck verifies connectivity to the Hive API
|
||||
func (c *HiveClient) HealthCheck(ctx context.Context) error {
|
||||
url := fmt.Sprintf("%s/health", c.BaseURL)
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create health check request: %w", err)
|
||||
}
|
||||
|
||||
resp, err := c.HTTPClient.Do(req)
|
||||
if err != nil {
|
||||
return fmt.Errorf("health check request failed: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return fmt.Errorf("Hive API health check failed with status: %d", resp.StatusCode)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
118
pkg/hive/models.go
Normal file
118
pkg/hive/models.go
Normal file
@@ -0,0 +1,118 @@
|
||||
package hive
|
||||
|
||||
import "time"
|
||||
|
||||
// Project represents a project managed by the Hive system.
type Project struct {
	ID          int    `json:"id"`
	Name        string `json:"name"`
	Description string `json:"description"`
	Status      string `json:"status"`
	GitURL      string `json:"git_url"`
	Owner       string `json:"owner"`
	Repository  string `json:"repository"`
	Branch      string `json:"branch"`
	BzzzEnabled bool   `json:"bzzz_enabled"`   // project is opted in to Bzzz
	ReadyToClaim bool  `json:"ready_to_claim"` // tasks may be claimed right now
	PrivateRepo bool   `json:"private_repo"`
	// GitHubTokenRequired indicates a GitHub token is needed to access the repo.
	GitHubTokenRequired bool                   `json:"github_token_required"`
	CreatedAt           time.Time              `json:"created_at"`
	UpdatedAt           time.Time              `json:"updated_at"`
	Metadata            map[string]interface{} `json:"metadata,omitempty"`
}

// Task represents a task (GitHub issue) from the Hive system.
type Task struct {
	ID          int    `json:"id"`
	ProjectID   int    `json:"project_id"`
	ProjectName string `json:"project_name"`
	GitURL      string `json:"git_url"`
	Owner       string `json:"owner"`
	Repository  string `json:"repository"`
	Branch      string `json:"branch"`

	// GitHub issue fields
	IssueNumber int    `json:"issue_number"`
	Title       string `json:"title"`
	Description string `json:"description"`
	State       string `json:"state"`
	Assignee    string `json:"assignee,omitempty"`

	// Task metadata
	TaskType     string                 `json:"task_type"`
	Priority     int                    `json:"priority"`
	Labels       []string               `json:"labels"`
	Requirements []string               `json:"requirements,omitempty"`
	Deliverables []string               `json:"deliverables,omitempty"`
	Context      map[string]interface{} `json:"context,omitempty"`

	// Timestamps
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}

// TaskClaim represents a task claim in the Hive system.
type TaskClaim struct {
	ID        int       `json:"id"`
	ProjectID int       `json:"project_id"`
	TaskID    int       `json:"task_id"`
	AgentID   string    `json:"agent_id"`
	Status    string    `json:"status"` // claimed, in_progress, completed, failed
	ClaimedAt time.Time `json:"claimed_at"`
	UpdatedAt time.Time `json:"updated_at"`
	Results   map[string]interface{} `json:"results,omitempty"`
}

// ProjectActivationRequest represents a request to activate/deactivate a project.
type ProjectActivationRequest struct {
	BzzzEnabled  bool `json:"bzzz_enabled"`
	ReadyToClaim bool `json:"ready_to_claim"`
}

// ProjectRegistrationRequest represents a request to register a new project.
type ProjectRegistrationRequest struct {
	Name         string `json:"name"`
	Description  string `json:"description"`
	GitURL       string `json:"git_url"`
	PrivateRepo  bool   `json:"private_repo"`
	BzzzEnabled  bool   `json:"bzzz_enabled"`
	AutoActivate bool   `json:"auto_activate"`
}

// AgentCapability represents an agent's capabilities for task matching.
type AgentCapability struct {
	AgentID      string    `json:"agent_id"`
	NodeID       string    `json:"node_id"`
	Capabilities []string  `json:"capabilities"`
	Models       []string  `json:"models"` // Ollama models available on the node
	Status       string    `json:"status"`
	LastSeen     time.Time `json:"last_seen"`
}

// CoordinationEvent represents a P2P coordination event.
type CoordinationEvent struct {
	EventID   string                 `json:"event_id"`
	ProjectID int                    `json:"project_id"`
	TaskID    int                    `json:"task_id"`
	EventType string                 `json:"event_type"` // task_claimed, plan_proposed, escalated, completed
	AgentID   string                 `json:"agent_id"`
	Message   string                 `json:"message"`
	Context   map[string]interface{} `json:"context,omitempty"`
	Timestamp time.Time              `json:"timestamp"`
}

// ErrorResponse represents an error response from the Hive API.
type ErrorResponse struct {
	Error   string `json:"error"`
	Message string `json:"message"`
	Code    string `json:"code,omitempty"`
}

// HealthStatus represents the health status of the Hive system.
type HealthStatus struct {
	Status    string    `json:"status"`
	Version   string    `json:"version"`
	Database  string    `json:"database"`
	Uptime    string    `json:"uptime"`
	CheckedAt time.Time `json:"checked_at"`
}
|
||||
Reference in New Issue
Block a user