WIP: Save current work before CHORUS rebrand
- Agent roles integration progress
- Various backend and frontend updates
- Storybook cache cleanup

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
backend/app/api/bzzz_logs.py (new file, 287 lines)
@@ -0,0 +1,287 @@
"""
Bzzz hypercore/hyperswarm log streaming API endpoints.
Provides real-time access to agent communication logs from the Bzzz network.
"""

from fastapi import APIRouter, WebSocket, WebSocketDisconnect, HTTPException, Query
from fastapi.responses import StreamingResponse
from typing import List, Optional, Dict, Any
import asyncio
import json
import logging
import httpx
import time
from datetime import datetime, timedelta

router = APIRouter()
logger = logging.getLogger(__name__)

# Keep track of active WebSocket connections
active_connections: List[WebSocket] = []
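
# A raw hypercore entry, as consumed by BzzzLogEntry below, is expected to look
# roughly like this (illustrative sketch based on the fields read in __init__;
# the exact payload emitted by the Bzzz agents may differ):
#   {
#       "index": 42,
#       "timestamp": "2025-07-01T12:00:00Z",
#       "author": "walnut-bzzz",
#       "type": "availability_broadcast",
#       "data": {"from_short": "walnut", "topic": "tasks", "data": {...}},
#       "hash": "abc123",
#       "prev_hash": "def456"
#   }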

class BzzzLogEntry:
    """Represents a Bzzz hypercore log entry"""

    def __init__(self, data: Dict[str, Any]):
        self.index = data.get("index", 0)
        self.timestamp = data.get("timestamp", "")
        self.author = data.get("author", "")
        self.log_type = data.get("type", "")
        self.message_data = data.get("data", {})
        self.hash_value = data.get("hash", "")
        self.prev_hash = data.get("prev_hash", "")

    def to_chat_message(self) -> Dict[str, Any]:
        """Convert hypercore log entry to chat message format"""
        # Extract message details from the log data
        msg_data = self.message_data

        return {
            "id": f"log-{self.index}",
            "senderId": msg_data.get("from_short", self.author),
            "senderName": msg_data.get("from_short", self.author),
            "content": self._format_message_content(),
            "timestamp": self.timestamp,
            "messageType": self._determine_message_type(),
            "channel": msg_data.get("topic", "unknown"),
            "swarmId": f"swarm-{msg_data.get('topic', 'unknown')}",
            "isDelivered": True,
            "isRead": True,
            "logType": self.log_type,
            "hash": self.hash_value
        }
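
    # Example of the chat-message shape this method produces (illustrative
    # values, not captured output):
    #   {"id": "log-42", "senderId": "walnut", "senderName": "walnut",
    #    "content": "Status: ready (1/3 tasks)", "timestamp": "...",
    #    "messageType": "received", "channel": "tasks", "swarmId": "swarm-tasks",
    #    "isDelivered": True, "isRead": True,
    #    "logType": "availability_broadcast", "hash": "abc123"}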

    def _format_message_content(self) -> str:
        """Format the log entry into a readable message"""
        msg_data = self.message_data
        message_type = msg_data.get("message_type", self.log_type)

        if message_type == "availability_broadcast":
            status = msg_data.get("data", {}).get("status", "unknown")
            current_tasks = msg_data.get("data", {}).get("current_tasks", 0)
            max_tasks = msg_data.get("data", {}).get("max_tasks", 0)
            return f"Status: {status} ({current_tasks}/{max_tasks} tasks)"

        elif message_type == "capability_broadcast":
            capabilities = msg_data.get("data", {}).get("capabilities", [])
            models = msg_data.get("data", {}).get("models", [])
            return f"Updated capabilities: {', '.join(capabilities[:3])}{'...' if len(capabilities) > 3 else ''}"

        elif message_type == "task_announced":
            task_data = msg_data.get("data", {})
            return f"Task announced: {task_data.get('title', 'Unknown task')}"

        elif message_type == "task_claimed":
            task_data = msg_data.get("data", {})
            return f"Task claimed: {task_data.get('title', 'Unknown task')}"

        elif message_type == "role_announcement":
            role = msg_data.get("data", {}).get("role", "unknown")
            return f"Role announcement: {role}"

        elif message_type == "collaboration":
            return f"Collaboration: {msg_data.get('data', {}).get('content', 'Agent discussion')}"

        elif self.log_type == "peer_joined":
            return "Agent joined the network"

        elif self.log_type == "peer_left":
            return "Agent left the network"

        else:
            # Generic fallback
            return f"{message_type}: {json.dumps(msg_data.get('data', {}))[:100]}{'...' if len(str(msg_data.get('data', {}))) > 100 else ''}"

    def _determine_message_type(self) -> str:
        """Determine if this is a sent, received, or system message"""
        msg_data = self.message_data

        # System messages
        if self.log_type in ["peer_joined", "peer_left", "network_event"]:
            return "system"

        # For now, treat all as received since we're monitoring
        # In a real implementation, you'd check if the author is the current node
        return "received"


class BzzzLogStreamer:
    """Manages streaming of Bzzz hypercore logs"""

    def __init__(self):
        self.agent_endpoints = {}
        self.last_indices = {}  # Track last seen index per agent

    async def discover_bzzz_agents(self) -> List[Dict[str, str]]:
        """Discover active Bzzz agents from the Hive agents API"""
        try:
            # This would typically query the actual agents database
            # For now, return known endpoints based on cluster nodes
            return [
                {"agent_id": "acacia-bzzz", "endpoint": "http://acacia.local:8080"},
                {"agent_id": "walnut-bzzz", "endpoint": "http://walnut.local:8080"},
                {"agent_id": "ironwood-bzzz", "endpoint": "http://ironwood.local:8080"},
                {"agent_id": "rosewood-bzzz", "endpoint": "http://rosewood.local:8080"},
            ]
        except Exception as e:
            logger.error(f"Failed to discover Bzzz agents: {e}")
            return []

    async def fetch_agent_logs(self, agent_endpoint: str, since_index: int = 0) -> List[BzzzLogEntry]:
        """Fetch hypercore logs from a specific Bzzz agent"""
        try:
            # Query the Bzzz agent's HTTP API for hypercore log entries
            async with httpx.AsyncClient() as client:
                response = await client.get(
                    f"{agent_endpoint}/api/hypercore/logs",
                    params={"since": since_index},
                    timeout=5.0
                )

                if response.status_code == 200:
                    logs_data = response.json()
                    return [BzzzLogEntry(log) for log in logs_data.get("entries", [])]
                else:
                    logger.warning(f"Failed to fetch logs from {agent_endpoint}: {response.status_code}")
                    return []

        except httpx.ConnectError:
            logger.debug(f"Agent at {agent_endpoint} is not reachable")
            return []
        except Exception as e:
            logger.error(f"Error fetching logs from {agent_endpoint}: {e}")
            return []
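
    # The agent endpoint is expected to respond with JSON shaped like
    # {"entries": [ ... ]}, where each entry carries the fields read by
    # BzzzLogEntry (index, timestamp, author, type, data, hash, prev_hash).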

    async def get_recent_logs(self, limit: int = 100) -> List[Dict[str, Any]]:
        """Get recent logs from all agents"""
        agents = await self.discover_bzzz_agents()
        all_messages = []

        for agent in agents:
            logs = await self.fetch_agent_logs(agent["endpoint"])
            for log in logs[-limit:]:  # Get recent entries
                message = log.to_chat_message()
                message["agent_id"] = agent["agent_id"]
                all_messages.append(message)

        # Sort by timestamp
        all_messages.sort(key=lambda x: x["timestamp"])
        return all_messages[-limit:]
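
    # Note: fetch_agent_logs() is called with the default since_index=0 here, so
    # each agent returns its full log and only the tail is kept; the combined
    # result is then sorted by timestamp and trimmed to `limit` entries overall.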

    async def stream_new_logs(self):
        """Continuously stream new logs from all agents"""
        while True:
            try:
                agents = await self.discover_bzzz_agents()
                new_messages = []

                for agent in agents:
                    agent_id = agent["agent_id"]
                    last_index = self.last_indices.get(agent_id, 0)

                    logs = await self.fetch_agent_logs(agent["endpoint"], last_index)

                    for log in logs:
                        if log.index > last_index:
                            message = log.to_chat_message()
                            message["agent_id"] = agent_id
                            new_messages.append(message)
                            self.last_indices[agent_id] = log.index

                # Send new messages to all connected WebSocket clients
                if new_messages and active_connections:
                    message_data = {
                        "type": "new_messages",
                        "messages": new_messages
                    }

                    # Remove disconnected clients
                    disconnected = []
                    for connection in active_connections:
                        try:
                            await connection.send_text(json.dumps(message_data))
                        except Exception:
                            disconnected.append(connection)

                    for conn in disconnected:
                        active_connections.remove(conn)

                await asyncio.sleep(2)  # Poll every 2 seconds

            except Exception as e:
                logger.error(f"Error in log streaming: {e}")
                await asyncio.sleep(5)
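
    # Design note: this loop polls each agent every 2 seconds, tracks the last
    # seen hypercore index per agent so only new entries are broadcast, and
    # backs off for 5 seconds after an unexpected error.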


# Global log streamer instance
log_streamer = BzzzLogStreamer()


@router.get("/bzzz/logs")
async def get_bzzz_logs(
    limit: int = Query(default=100, le=1000),
    agent_id: Optional[str] = None
):
    """Get recent Bzzz hypercore logs"""
    try:
        logs = await log_streamer.get_recent_logs(limit)

        if agent_id:
            logs = [log for log in logs if log.get("agent_id") == agent_id]

        return {
            "logs": logs,
            "count": len(logs),
            "timestamp": datetime.utcnow().isoformat()
        }
    except Exception as e:
        logger.error(f"Error fetching Bzzz logs: {e}")
        raise HTTPException(status_code=500, detail=str(e))
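
# Example request (illustrative; the exact URL depends on the prefix this router
# is mounted under in the main app — an /api prefix on localhost:8000 is assumed):
#   curl "http://localhost:8000/api/bzzz/logs?limit=50&agent_id=walnut-bzzz"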


@router.get("/bzzz/agents")
async def get_bzzz_agents():
    """Get list of discovered Bzzz agents"""
    try:
        agents = await log_streamer.discover_bzzz_agents()
        return {"agents": agents}
    except Exception as e:
        logger.error(f"Error discovering Bzzz agents: {e}")
        raise HTTPException(status_code=500, detail=str(e))


@router.websocket("/bzzz/logs/stream")
async def websocket_bzzz_logs(websocket: WebSocket):
    """WebSocket endpoint for real-time Bzzz log streaming"""
    await websocket.accept()
    active_connections.append(websocket)

    try:
        # Send initial recent logs
        recent_logs = await log_streamer.get_recent_logs(50)
        await websocket.send_text(json.dumps({
            "type": "initial_logs",
            "messages": recent_logs
        }))

        # Keep connection alive and handle client messages
        while True:
            try:
                # Wait for client messages (ping, filters, etc.)
                message = await asyncio.wait_for(websocket.receive_text(), timeout=30)
                client_data = json.loads(message)

                if client_data.get("type") == "ping":
                    await websocket.send_text(json.dumps({"type": "pong"}))

            except asyncio.TimeoutError:
                # Send periodic heartbeat
                await websocket.send_text(json.dumps({"type": "heartbeat"}))

    except WebSocketDisconnect:
        active_connections.remove(websocket)
    except Exception as e:
        logger.error(f"WebSocket error: {e}")
        if websocket in active_connections:
            active_connections.remove(websocket)
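
# Minimal client sketch (illustrative only; assumes the app is reachable at
# ws://localhost:8000 with this router mounted under /api, and that the
# third-party `websockets` package is installed):
#
#   import asyncio, json, websockets
#
#   async def tail_bzzz_logs():
#       async with websockets.connect("ws://localhost:8000/api/bzzz/logs/stream") as ws:
#           async for raw in ws:
#               event = json.loads(raw)
#               if event.get("type") in ("initial_logs", "new_messages"):
#                   for msg in event["messages"]:
#                       print(msg["timestamp"], msg["senderName"], msg["content"])
#
#   asyncio.run(tail_bzzz_logs())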


# Start the log streaming background task
@router.on_event("startup")
async def start_log_streaming():
    """Start the background log streaming task"""
    asyncio.create_task(log_streamer.stream_new_logs())
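
# This startup hook launches the polling loop as a background task when the
# application starts; the handler is registered on the router and is expected to
# be picked up once the router is included in the main FastAPI app.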