Major WHOOSH system refactoring and feature enhancements
- Migrated from HIVE branding to WHOOSH across all components
- Enhanced backend API with new services: AI models, BZZZ integration, templates, members
- Added comprehensive testing suite with security, performance, and integration tests
- Improved frontend with new components for project setup, AI models, and team management
- Updated MCP server implementation with WHOOSH-specific tools and resources
- Enhanced deployment configurations with production-ready Docker setups
- Added comprehensive documentation and setup guides
- Implemented age encryption service and UCXL integration

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
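A quick way to audit a rename like this is to search the working tree for surviving old-brand references. A minimal sketch, assuming the usual dependency and build directories for this repo:

    # Case-insensitive search for leftover "hive" references,
    # skipping VCS metadata, dependencies, and build output.
    grep -rIli 'hive' . --exclude-dir=.git --exclude-dir=node_modules --exclude-dir=venv --exclude-dir=dist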
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 """
-Auto-Discovery Agent Registration Script for Hive
+Auto-Discovery Agent Registration Script for WHOOSH
 Automatically discovers Ollama endpoints on the subnet and registers them as agents
 """

@@ -15,7 +15,7 @@ from typing import Dict, List, Optional, Tuple
 import time

 # Configuration
-HIVE_API_URL = "https://hive.home.deepblack.cloud"
+WHOOSH_API_URL = "https://whoosh.home.deepblack.cloud"
 SUBNET_BASE = "192.168.1"
 OLLAMA_PORT = 11434
 DISCOVERY_TIMEOUT = 3
@@ -171,7 +171,7 @@ class AgentDiscovery:
         model_str = " ".join(models).lower()
         hostname_lower = hostname.lower()

-        # Check hostname patterns - map to valid Hive AgentType values
+        # Check hostname patterns - map to valid WHOOSH AgentType values
         if "walnut" in hostname_lower:
             return "pytorch_dev"  # Full-stack development
         elif "acacia" in hostname_lower:
@@ -232,7 +232,7 @@ class AgentDiscovery:
         return capability_map.get(specialty, ["general_development", "code_assistance"])

     async def register_agent(self, agent_info: Dict) -> bool:
-        """Register a discovered agent with Hive"""
+        """Register a discovered agent with WHOOSH"""
         try:
             hostname = agent_info["system_info"]["hostname"]
             specialty = self.determine_agent_specialty(agent_info["models"], hostname)
@@ -258,7 +258,7 @@ class AgentDiscovery:
             }

             async with self.session.post(
-                f"{HIVE_API_URL}/api/agents",
+                f"{WHOOSH_API_URL}/api/agents",
                 json=agent_data,
                 headers={"Content-Type": "application/json"}
             ) as response:
@@ -275,29 +275,29 @@ class AgentDiscovery:
             print(f"  ❌ Error registering {agent_info['host']}: {e}")
             return False

-    async def test_hive_connection(self) -> bool:
-        """Test connection to Hive API"""
+    async def test_whoosh_connection(self) -> bool:
+        """Test connection to WHOOSH API"""
         try:
-            async with self.session.get(f"{HIVE_API_URL}/health") as response:
+            async with self.session.get(f"{WHOOSH_API_URL}/health") as response:
                 if response.status == 200:
-                    print("✅ Connected to Hive API")
+                    print("✅ Connected to WHOOSH API")
                     return True
                 else:
-                    print(f"❌ Hive API returned status {response.status}")
+                    print(f"❌ WHOOSH API returned status {response.status}")
                     return False
         except Exception as e:
-            print(f"❌ Failed to connect to Hive API: {e}")
+            print(f"❌ Failed to connect to WHOOSH API: {e}")
             return False

 async def main():
     """Main discovery and registration process"""
-    print("🐝 Hive Agent Auto-Discovery Script")
+    print("🐝 WHOOSH Agent Auto-Discovery Script")
     print("=" * 50)

     async with AgentDiscovery() as discovery:
-        # Test Hive connection
-        if not await discovery.test_hive_connection():
-            print("❌ Cannot connect to Hive API. Make sure Hive is running.")
+        # Test WHOOSH connection
+        if not await discovery.test_whoosh_connection():
+            print("❌ Cannot connect to WHOOSH API. Make sure WHOOSH is running.")
             sys.exit(1)

         # Discover agents
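For reference, the registration flow above reduces to a health check plus a POST of agent metadata to /api/agents. A manual equivalent with curl; only the two endpoints are taken from the script, the payload fields are assumptions:

    curl https://whoosh.home.deepblack.cloud/health
    curl -X POST https://whoosh.home.deepblack.cloud/api/agents \
         -H "Content-Type: application/json" \
         -d '{"endpoint": "http://192.168.1.27:11434", "specialty": "pytorch_dev"}'  # field names assumed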
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 """
-Hive Cluster Coordination for n8n Workflow Development
+WHOOSH Cluster Coordination for n8n Workflow Development
 Coordinates distributed development of intelligent task allocation workflows
 """

@@ -45,7 +45,7 @@ AGENTS = {
     }
 }

-class HiveN8NCoordinator:
+class WHOOSHN8NCoordinator:
     def __init__(self):
         self.session = None
         self.results = {}
@@ -134,7 +134,7 @@ Respond with detailed technical solutions, code examples, and implementation gui
     async def coordinate_development(self):
         """Coordinate the distributed development of n8n workflows"""

-        print("🐝 HIVE CLUSTER N8N WORKFLOW DEVELOPMENT COORDINATION")
+        print("🐝 WHOOSH CLUSTER N8N WORKFLOW DEVELOPMENT COORDINATION")
         print("=" * 70)
         print(f"🚀 Coordinating {len(AGENTS)} specialized AI agents")
         print(f"🎯 Target: Intelligent task allocation workflows for 25-person software company")
@@ -276,7 +276,7 @@ Respond with detailed technical solutions, code examples, and implementation gui

         # Save detailed results
         timestamp = int(time.time())
-        results_file = f"/home/tony/AI/projects/hive/results/n8n_coordination_{timestamp}.json"
+        results_file = f"/home/tony/AI/projects/whoosh/results/n8n_coordination_{timestamp}.json"

         with open(results_file, 'w') as f:
             json.dump({
@@ -313,9 +313,9 @@ async def main():

     # Ensure results directory exists
     import os
-    os.makedirs("/home/tony/AI/projects/hive/results", exist_ok=True)
+    os.makedirs("/home/tony/AI/projects/whoosh/results", exist_ok=True)

-    async with HiveN8NCoordinator() as coordinator:
+    async with WHOOSHN8NCoordinator() as coordinator:
         await coordinator.coordinate_development()

 if __name__ == "__main__":
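The coordinator writes each run to a timestamped JSON file under results/; one way to inspect the most recent run, assuming jq is available:

    ls -t /home/tony/AI/projects/whoosh/results/n8n_coordination_*.json | head -1 | xargs jq '.'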
@@ -1,6 +1,6 @@
 #!/bin/bash

-# Distributed Hive Workflow Deployment Script
+# Distributed WHOOSH Workflow Deployment Script
 # Deploys the enhanced distributed development workflow system across the cluster

 set -e
@@ -13,7 +13,7 @@ BLUE='\033[0;34m'
 NC='\033[0m' # No Color

 # Configuration
-PROJECT_ROOT="/home/tony/AI/projects/hive"
+PROJECT_ROOT="/home/tony/AI/projects/whoosh"
 CLUSTER_NODES=("192.168.1.72" "192.168.1.27" "192.168.1.113" "192.168.1.132" "192.168.1.106")
 CLUSTER_NAMES=("ACACIA" "WALNUT" "IRONWOOD" "ROSEWOOD" "FORSTEINET")
 SSH_USER="tony"
@@ -98,8 +98,8 @@ setup_redis() {
     sudo systemctl enable redis-server

     # Configure Redis for cluster coordination
-    sudo tee /etc/redis/redis.conf.d/hive-distributed.conf > /dev/null <<EOF
-# Hive Distributed Workflow Configuration
+    sudo tee /etc/redis/redis.conf.d/whoosh-distributed.conf > /dev/null <<EOF
+# WHOOSH Distributed Workflow Configuration
 maxmemory 512mb
 maxmemory-policy allkeys-lru
 save 900 1
@@ -143,7 +143,7 @@ deploy_cluster_config() {

     # Create configuration package
     cd "$PROJECT_ROOT"
-    tar -czf /tmp/hive-distributed-config.tar.gz config/distributed_config.yaml
+    tar -czf /tmp/whoosh-distributed-config.tar.gz config/distributed_config.yaml

     for i in "${!CLUSTER_NODES[@]}"; do
         node="${CLUSTER_NODES[$i]}"
@@ -152,13 +152,13 @@ deploy_cluster_config() {
         log "Deploying to $name ($node)..."

         # Copy configuration
-        sshpass -p "$SSH_PASS" scp -o StrictHostKeyChecking=no /tmp/hive-distributed-config.tar.gz "$SSH_USER@$node:/tmp/"
+        sshpass -p "$SSH_PASS" scp -o StrictHostKeyChecking=no /tmp/whoosh-distributed-config.tar.gz "$SSH_USER@$node:/tmp/"

         # Extract and setup configuration
         sshpass -p "$SSH_PASS" ssh -o StrictHostKeyChecking=no "$SSH_USER@$node" "
-            mkdir -p /home/$SSH_USER/AI/projects/hive/config
-            cd /home/$SSH_USER/AI/projects/hive/config
-            tar -xzf /tmp/hive-distributed-config.tar.gz
+            mkdir -p /home/$SSH_USER/AI/projects/whoosh/config
+            cd /home/$SSH_USER/AI/projects/whoosh/config
+            tar -xzf /tmp/whoosh-distributed-config.tar.gz
             chmod 644 distributed_config.yaml
         "

@@ -166,7 +166,7 @@ deploy_cluster_config() {
     done

     # Clean up
-    rm -f /tmp/hive-distributed-config.tar.gz
+    rm -f /tmp/whoosh-distributed-config.tar.gz
 }

 # Update Ollama configurations for distributed workflows
@@ -209,9 +209,9 @@ start_distributed_system() {
     cd "$PROJECT_ROOT/backend"
     source venv/bin/activate

-    # Start the main Hive application with distributed workflows
+    # Start the main WHOOSH application with distributed workflows
     export PYTHONPATH="$PROJECT_ROOT/backend:$PYTHONPATH"
-    export HIVE_CONFIG_PATH="$PROJECT_ROOT/config/distributed_config.yaml"
+    export WHOOSH_CONFIG_PATH="$PROJECT_ROOT/config/distributed_config.yaml"

     # Run database migrations
     log "Running database migrations..."
@@ -222,23 +222,23 @@ print('Database initialized')
     "

     # Start the application in the background
-    log "Starting Hive with distributed workflows..."
+    log "Starting WHOOSH with distributed workflows..."
     nohup python -m uvicorn app.main:app \
         --host 0.0.0.0 \
         --port 8000 \
         --reload \
-        --log-level info > /tmp/hive-distributed.log 2>&1 &
+        --log-level info > /tmp/whoosh-distributed.log 2>&1 &

-    HIVE_PID=$!
-    echo $HIVE_PID > /tmp/hive-distributed.pid
+    WHOOSH_PID=$!
+    echo $WHOOSH_PID > /tmp/whoosh-distributed.pid

     # Wait for startup
     sleep 10

     # Check if the service is running
-    if kill -0 $HIVE_PID 2>/dev/null; then
-        success "Distributed workflow system started (PID: $HIVE_PID)"
-        log "Application logs: tail -f /tmp/hive-distributed.log"
+    if kill -0 $WHOOSH_PID 2>/dev/null; then
+        success "Distributed workflow system started (PID: $WHOOSH_PID)"
+        log "Application logs: tail -f /tmp/whoosh-distributed.log"
         log "Health check: curl http://localhost:8000/health"
         log "Distributed API: curl http://localhost:8000/api/distributed/cluster/status"
     else
@@ -297,9 +297,9 @@ except:
 create_systemd_service() {
     log "Creating systemd service for production deployment..."

-    sudo tee /etc/systemd/system/hive-distributed.service > /dev/null <<EOF
+    sudo tee /etc/systemd/system/whoosh-distributed.service > /dev/null <<EOF
 [Unit]
-Description=Hive Distributed Workflow System
+Description=WHOOSH Distributed Workflow System
 After=network.target redis.service
 Wants=redis.service

@@ -309,7 +309,7 @@ User=$USER
 Group=$USER
 WorkingDirectory=$PROJECT_ROOT/backend
 Environment=PYTHONPATH=$PROJECT_ROOT/backend
-Environment=HIVE_CONFIG_PATH=$PROJECT_ROOT/config/distributed_config.yaml
+Environment=WHOOSH_CONFIG_PATH=$PROJECT_ROOT/config/distributed_config.yaml
 ExecStart=$PROJECT_ROOT/backend/venv/bin/python -m uvicorn app.main:app --host 0.0.0.0 --port 8000
 ExecReload=/bin/kill -HUP \$MAINPID
 Restart=always
@@ -323,28 +323,28 @@ EOF

     # Enable the service
     sudo systemctl daemon-reload
-    sudo systemctl enable hive-distributed.service
+    sudo systemctl enable whoosh-distributed.service

     success "Systemd service created and enabled"
-    log "Use 'sudo systemctl start hive-distributed' to start the service"
-    log "Use 'sudo systemctl status hive-distributed' to check status"
+    log "Use 'sudo systemctl start whoosh-distributed' to start the service"
+    log "Use 'sudo systemctl status whoosh-distributed' to check status"
 }

 # Generate deployment report
 generate_report() {
     log "Generating deployment report..."

-    report_file="/tmp/hive-distributed-deployment-report.txt"
+    report_file="/tmp/whoosh-distributed-deployment-report.txt"

     cat > "$report_file" <<EOF
-# Hive Distributed Workflow System - Deployment Report
+# WHOOSH Distributed Workflow System - Deployment Report
 Generated: $(date)

 ## Deployment Summary
 - Project Directory: $PROJECT_ROOT
 - Configuration: $PROJECT_ROOT/config/distributed_config.yaml
-- Log File: /tmp/hive-distributed.log
-- PID File: /tmp/hive-distributed.pid
+- Log File: /tmp/whoosh-distributed.log
+- PID File: /tmp/whoosh-distributed.pid

 ## Cluster Configuration
 EOF
@@ -366,11 +366,11 @@ EOF
 - Performance Metrics: http://localhost:8000/api/distributed/performance/metrics

 ## Management Commands
-- Start Service: sudo systemctl start hive-distributed
-- Stop Service: sudo systemctl stop hive-distributed
-- Restart Service: sudo systemctl restart hive-distributed
-- View Logs: sudo journalctl -u hive-distributed -f
-- View Application Logs: tail -f /tmp/hive-distributed.log
+- Start Service: sudo systemctl start whoosh-distributed
+- Stop Service: sudo systemctl stop whoosh-distributed
+- Restart Service: sudo systemctl restart whoosh-distributed
+- View Logs: sudo journalctl -u whoosh-distributed -f
+- View Application Logs: tail -f /tmp/whoosh-distributed.log

 ## Cluster Operations
 - Check Cluster Status: curl http://localhost:8000/api/distributed/cluster/status
@@ -400,13 +400,13 @@ EOF
 main() {
     echo -e "${GREEN}"
     echo "╔══════════════════════════════════════════════════════════════╗"
-    echo "║ Hive Distributed Workflow Deployment ║"
+    echo "║ WHOOSH Distributed Workflow Deployment ║"
     echo "║ ║"
     echo "║ Deploying cluster-wide development workflow orchestration ║"
     echo "╚══════════════════════════════════════════════════════════════╝"
     echo -e "${NC}"

-    log "Starting deployment of Hive Distributed Workflow System..."
+    log "Starting deployment of WHOOSH Distributed Workflow System..."

     # Run deployment steps
     check_prerequisites
@@ -424,7 +424,7 @@ main() {
     echo "╔══════════════════════════════════════════════════════════════╗"
     echo "║ Deployment Completed! ║"
     echo "║ ║"
-    echo "║ 🚀 Hive Distributed Workflow System is now running ║"
+    echo "║ 🚀 WHOOSH Distributed Workflow System is now running ║"
     echo "║ 📊 Visit http://localhost:8000/docs for API documentation ║"
     echo "║ 🌐 Cluster status: http://localhost:8000/api/distributed/ ║"
     echo "║ cluster/status ║"
@@ -438,24 +438,24 @@ case "${1:-deploy}" in
         main
         ;;
     "start")
-        log "Starting Hive Distributed Workflow System..."
-        sudo systemctl start hive-distributed
+        log "Starting WHOOSH Distributed Workflow System..."
+        sudo systemctl start whoosh-distributed
         ;;
     "stop")
-        log "Stopping Hive Distributed Workflow System..."
-        sudo systemctl stop hive-distributed
-        if [ -f /tmp/hive-distributed.pid ]; then
-            kill $(cat /tmp/hive-distributed.pid) 2>/dev/null || true
-            rm -f /tmp/hive-distributed.pid
+        log "Stopping WHOOSH Distributed Workflow System..."
+        sudo systemctl stop whoosh-distributed
+        if [ -f /tmp/whoosh-distributed.pid ]; then
+            kill $(cat /tmp/whoosh-distributed.pid) 2>/dev/null || true
+            rm -f /tmp/whoosh-distributed.pid
         fi
         ;;
     "status")
         log "Checking system status..."
-        sudo systemctl status hive-distributed
+        sudo systemctl status whoosh-distributed
         ;;
     "logs")
         log "Showing application logs..."
-        tail -f /tmp/hive-distributed.log
+        tail -f /tmp/whoosh-distributed.log
         ;;
     "health")
         log "Running health checks..."
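The closing case statement turns this script into a small management CLI; a usage sketch (the filename is an assumption, the subcommands come from the script):

    ./deploy-distributed.sh           # default "deploy": full cluster rollout
    ./deploy-distributed.sh status    # sudo systemctl status whoosh-distributed
    ./deploy-distributed.sh logs      # tail -f /tmp/whoosh-distributed.log
    ./deploy-distributed.sh health    # run the health checks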
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 """
-Migration script to consolidate existing distributed AI projects into Hive
+Migration script to consolidate existing distributed AI projects into WHOOSH
 """

 import os
@@ -18,13 +18,13 @@ from datetime import datetime
 sys.path.append('/home/tony/AI/projects/distributed-ai-dev')
 sys.path.append('/home/tony/AI/projects/McPlan/mcplan-web/backend')

-class HiveMigrator:
+class WHOOSHMigrator:
     """
     Migrates and consolidates data from existing distributed AI projects
     """

     def __init__(self):
-        self.hive_root = Path("/home/tony/AI/projects/hive")
+        self.whoosh_root = Path("/home/tony/AI/projects/whoosh")
         self.projects = {
             'distributed-ai-dev': Path("/home/tony/AI/projects/distributed-ai-dev"),
             'mcplan': Path("/home/tony/AI/projects/McPlan"),
@@ -50,11 +50,11 @@ class HiveMigrator:

     async def migrate_all(self):
         """Execute complete migration process"""
-        self.log("🚀 Starting Hive migration from existing projects")
+        self.log("🚀 Starting WHOOSH migration from existing projects")

         try:
             # Phase 1: Setup and validation
-            await self.setup_hive_structure()
+            await self.setup_whoosh_structure()
             await self.validate_source_projects()

             # Phase 2: Configuration migration
@@ -78,9 +78,9 @@ class HiveMigrator:
             self.error(f"Migration failed: {str(e)}")
             raise

-    async def setup_hive_structure(self):
-        """Create Hive project directory structure"""
-        self.log("📁 Setting up Hive project structure")
+    async def setup_whoosh_structure(self):
+        """Create WHOOSH project directory structure"""
+        self.log("📁 Setting up WHOOSH project structure")

         directories = [
             "backend/app/core",
@@ -114,7 +114,7 @@ class HiveMigrator:
         ]

         for directory in directories:
-            (self.hive_root / directory).mkdir(parents=True, exist_ok=True)
+            (self.whoosh_root / directory).mkdir(parents=True, exist_ok=True)

         self.log(f"Created {len(directories)} directories")

@@ -141,9 +141,9 @@ class HiveMigrator:
         with open(source_config, 'r') as f:
             agents_config = yaml.safe_load(f)

-        # Create enhanced Hive configuration
-        hive_config = {
-            'hive': {
+        # Create enhanced WHOOSH configuration
+        whoosh_config = {
+            'whoosh': {
                 'cluster': {
                     'name': 'Development Cluster',
                     'region': 'home.deepblack.cloud'
@@ -200,11 +200,11 @@ class HiveMigrator:
                     'max_response_time': 30
                 })
             }
-            hive_config['hive']['agents'][agent_id] = enhanced_config
+            whoosh_config['whoosh']['agents'][agent_id] = enhanced_config

         # Add default agents if none exist
-        if not hive_config['hive']['agents']:
-            hive_config['hive']['agents'] = {
+        if not whoosh_config['whoosh']['agents']:
+            whoosh_config['whoosh']['agents'] = {
                 'acacia': {
                     'name': 'ACACIA Infrastructure Specialist',
                     'endpoint': 'http://192.168.1.72:11434',
@@ -256,11 +256,11 @@ class HiveMigrator:
         }

         # Save unified configuration
-        config_path = self.hive_root / 'config' / 'hive.yaml'
+        config_path = self.whoosh_root / 'config' / 'whoosh.yaml'
         with open(config_path, 'w') as f:
-            yaml.dump(hive_config, f, default_flow_style=False, sort_keys=False)
+            yaml.dump(whoosh_config, f, default_flow_style=False, sort_keys=False)

-        self.log(f"✅ Migrated {len(hive_config['hive']['agents'])} agent configurations")
+        self.log(f"✅ Migrated {len(whoosh_config['whoosh']['agents'])} agent configurations")

     async def migrate_monitoring_configs(self):
         """Migrate monitoring configurations from cluster project"""
@@ -272,15 +272,15 @@ class HiveMigrator:
                 'scrape_interval': '30s',
                 'evaluation_interval': '30s'
             },
-            'rule_files': ['hive_alerts.yml'],
+            'rule_files': ['whoosh_alerts.yml'],
             'scrape_configs': [
                 {
-                    'job_name': 'hive-backend',
-                    'static_configs': [{'targets': ['hive-coordinator:8000']}],
+                    'job_name': 'whoosh-backend',
+                    'static_configs': [{'targets': ['whoosh-coordinator:8000']}],
                     'metrics_path': '/api/metrics'
                 },
                 {
-                    'job_name': 'hive-agents',
+                    'job_name': 'whoosh-agents',
                     'static_configs': [
                         {'targets': ['192.168.1.72:11434']},
                         {'targets': ['192.168.1.27:11434']},
@@ -290,15 +290,15 @@ class HiveMigrator:
             ]
         }

-        prometheus_path = self.hive_root / 'config' / 'monitoring' / 'prometheus.yml'
+        prometheus_path = self.whoosh_root / 'config' / 'monitoring' / 'prometheus.yml'
         with open(prometheus_path, 'w') as f:
             yaml.dump(prometheus_config, f)

         # Create Grafana dashboard configurations
         grafana_config = {
             'dashboards': {
-                'hive_overview': {
-                    'title': 'Hive Cluster Overview',
+                'whoosh_overview': {
+                    'title': 'WHOOSH Cluster Overview',
                     'panels': [
                         'Agent Status',
                         'Task Queue Length',
@@ -319,7 +319,7 @@ class HiveMigrator:
             }
         }

-        grafana_path = self.hive_root / 'config' / 'monitoring' / 'grafana.yml'
+        grafana_path = self.whoosh_root / 'config' / 'monitoring' / 'grafana.yml'
         with open(grafana_path, 'w') as f:
             yaml.dump(grafana_config, f)

@@ -332,7 +332,7 @@ class HiveMigrator:
         # Map of source files to destination files
         component_mapping = {
             # From distributed-ai-dev
-            'distributed-ai-dev/src/core/ai_dev_coordinator.py': 'backend/app/core/hive_coordinator.py',
+            'distributed-ai-dev/src/core/ai_dev_coordinator.py': 'backend/app/core/whoosh_coordinator.py',
             'distributed-ai-dev/src/monitoring/performance_monitor.py': 'backend/app/core/performance_monitor.py',
             'distributed-ai-dev/src/config/agent_manager.py': 'backend/app/core/agent_manager.py',

@@ -357,7 +357,7 @@ class HiveMigrator:
                 break

         if source_path:
-            dest_path = self.hive_root / dest_rel
+            dest_path = self.whoosh_root / dest_rel

             if source_path.is_file():
                 dest_path.parent.mkdir(parents=True, exist_ok=True)
@@ -377,7 +377,7 @@ class HiveMigrator:
         self.log("🗄️ Creating unified database schema")

         schema_sql = """
--- Hive Unified Database Schema
+-- WHOOSH Unified Database Schema

 -- User Management
 CREATE TABLE users (
@@ -497,11 +497,11 @@ CREATE INDEX idx_alerts_unresolved ON alerts(resolved, created_at) WHERE resolve

 -- Sample data
 INSERT INTO users (email, hashed_password, role) VALUES
-('admin@hive.local', '$2b$12$LQv3c1yqBWVHxkd0LHAkCOYz6TtxMQJqhN8/lewohT6ZErjH.2T.2', 'admin'),
-('developer@hive.local', '$2b$12$LQv3c1yqBWVHxkd0LHAkCOYz6TtxMQJqhN8/lewohT6ZErjH.2T.2', 'developer');
+('admin@whoosh.local', '$2b$12$LQv3c1yqBWVHxkd0LHAkCOYz6TtxMQJqhN8/lewohT6ZErjH.2T.2', 'admin'),
+('developer@whoosh.local', '$2b$12$LQv3c1yqBWVHxkd0LHAkCOYz6TtxMQJqhN8/lewohT6ZErjH.2T.2', 'developer');
 """

-        schema_path = self.hive_root / 'backend' / 'migrations' / '001_initial_schema.sql'
+        schema_path = self.whoosh_root / 'backend' / 'migrations' / '001_initial_schema.sql'
         with open(schema_path, 'w') as f:
             f.write(schema_sql)

@@ -537,8 +537,8 @@ INSERT INTO users (email, hashed_password, role) VALUES
             'migrated_at': datetime.now().isoformat()
         }

-        # Save to Hive workflows directory
-        dest_file = self.hive_root / 'config' / 'workflows' / f'{workflow_file.stem}.json'
+        # Save to WHOOSH workflows directory
+        dest_file = self.whoosh_root / 'config' / 'workflows' / f'{workflow_file.stem}.json'
         with open(dest_file, 'w') as f:
             json.dump(migration_record, f, indent=2)

@@ -591,7 +591,7 @@ INSERT INTO users (email, hashed_password, role) VALUES
         conn.close()

         # Save migration data
-        migration_file = self.hive_root / 'scripts' / 'mcplan_data_export.json'
+        migration_file = self.whoosh_root / 'scripts' / 'mcplan_data_export.json'
         with open(migration_file, 'w') as f:
             json.dump(migration_data, f, indent=2, default=str)

@@ -609,11 +609,11 @@ INSERT INTO users (email, hashed_password, role) VALUES
             'migration_summary': {
                 'timestamp': datetime.now().isoformat(),
                 'source_projects': list(self.projects.keys()),
-                'hive_version': '1.0.0',
+                'whoosh_version': '1.0.0',
                 'migration_status': 'completed' if not self.errors else 'completed_with_errors'
             },
             'components_migrated': {
-                'agent_configurations': 'config/hive.yaml',
+                'agent_configurations': 'config/whoosh.yaml',
                 'monitoring_configs': 'config/monitoring/',
                 'database_schema': 'backend/migrations/001_initial_schema.sql',
                 'core_components': 'backend/app/core/',
@@ -634,12 +634,12 @@ INSERT INTO users (email, hashed_password, role) VALUES
             'errors': self.errors
         }

-        report_path = self.hive_root / 'MIGRATION_REPORT.json'
+        report_path = self.whoosh_root / 'MIGRATION_REPORT.json'
         with open(report_path, 'w') as f:
             json.dump(report, f, indent=2)

         # Also create a markdown summary
-        md_report = f"""# Hive Migration Report
+        md_report = f"""# WHOOSH Migration Report

 ## Summary
 - **Migration Date**: {report['migration_summary']['timestamp']}
@@ -666,7 +666,7 @@ INSERT INTO users (email, hashed_password, role) VALUES
         for error in self.errors:
             md_report += f"- {error}\n"

-        md_report_path = self.hive_root / 'MIGRATION_REPORT.md'
+        md_report_path = self.whoosh_root / 'MIGRATION_REPORT.md'
         with open(md_report_path, 'w') as f:
             f.write(md_report)

@@ -676,20 +676,20 @@ INSERT INTO users (email, hashed_password, role) VALUES

 async def main():
     """Main migration function"""
-    migrator = HiveMigrator()
+    migrator = WHOOSHMigrator()

     try:
         await migrator.migrate_all()

         print("\n" + "="*60)
-        print("🎉 HIVE MIGRATION COMPLETED!")
+        print("🎉 WHOOSH MIGRATION COMPLETED!")
         print("="*60)
         print(f"✅ Migration successful with {len(migrator.errors)} errors")
-        print(f"📁 Hive project created at: {migrator.hive_root}")
+        print(f"📁 WHOOSH project created at: {migrator.whoosh_root}")
         print(f"📋 Check MIGRATION_REPORT.md for detailed results")
         print("\nNext steps:")
-        print("1. cd /home/tony/AI/projects/hive")
-        print("2. Review config/hive.yaml")
+        print("1. cd /home/tony/AI/projects/whoosh")
+        print("2. Review config/whoosh.yaml")
         print("3. docker-compose up -d")
         print("4. Visit http://localhost:3000")
         print("="*60)
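From the keys visible above, the unified config/whoosh.yaml written by the migrator has roughly this shape; a sketch only, since nesting below the shown keys is an assumption:

    whoosh:
      cluster:
        name: Development Cluster
        region: home.deepblack.cloud
      agents:
        acacia:
          name: ACACIA Infrastructure Specialist
          endpoint: http://192.168.1.72:11434
          max_response_time: 30  # placement of this key is assumed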
@@ -1,7 +1,7 @@
 #!/usr/bin/env python3
 """
-Agent Registration Script for Hive
-Registers cluster agents with the Hive orchestration system
+Agent Registration Script for WHOOSH
+Registers cluster agents with the WHOOSH orchestration system
 """

 import json
@@ -12,11 +12,11 @@ import time
 from pathlib import Path

 # Configuration
-HIVE_API_URL = "https://hive.home.deepblack.cloud/api"
-CONFIG_FILE = "/home/tony/AI/projects/hive/config/hive.yaml"
+WHOOSH_API_URL = "https://whoosh.home.deepblack.cloud/api"
+CONFIG_FILE = "/home/tony/AI/projects/whoosh/config/whoosh.yaml"

 def load_config():
-    """Load the hive.yaml configuration file"""
+    """Load the whoosh.yaml configuration file"""
     try:
         with open(CONFIG_FILE, 'r') as f:
             return yaml.safe_load(f)
@@ -24,18 +24,18 @@ def load_config():
         print(f"❌ Error loading config: {e}")
         sys.exit(1)

-def test_hive_connection():
-    """Test connection to Hive API"""
+def test_whoosh_connection():
+    """Test connection to WHOOSH API"""
     try:
-        response = requests.get(f"{HIVE_API_URL}/health", timeout=5)
+        response = requests.get(f"{WHOOSH_API_URL}/health", timeout=5)
         if response.status_code == 200:
-            print("✅ Connected to Hive API")
+            print("✅ Connected to WHOOSH API")
             return True
         else:
-            print(f"❌ Hive API returned status {response.status_code}")
+            print(f"❌ WHOOSH API returned status {response.status_code}")
             return False
     except Exception as e:
-        print(f"❌ Failed to connect to Hive API: {e}")
+        print(f"❌ Failed to connect to WHOOSH API: {e}")
         return False

 def test_agent_connectivity(endpoint):
@@ -47,7 +47,7 @@ def test_agent_connectivity(endpoint):
         return False

 def register_agent(agent_id, agent_config):
-    """Register a single agent with Hive"""
+    """Register a single agent with WHOOSH"""

     # Check if agent is responsive
     if not test_agent_connectivity(agent_config['endpoint']):
@@ -71,7 +71,7 @@ def register_agent(agent_id, agent_config):
     try:
         # Register the agent
         response = requests.post(
-            f"{HIVE_API_URL}/api/agents",
+            f"{WHOOSH_API_URL}/api/agents",
             json=agent_data,
             headers={"Content-Type": "application/json"},
             timeout=10
@@ -91,17 +91,17 @@ def register_agent(agent_id, agent_config):

 def main():
     """Main registration process"""
-    print("🐝 Hive Agent Registration Script")
+    print("🐝 WHOOSH Agent Registration Script")
     print("=" * 50)

-    # Test Hive connection
-    if not test_hive_connection():
-        print("❌ Cannot connect to Hive API. Make sure Hive is running.")
+    # Test WHOOSH connection
+    if not test_whoosh_connection():
+        print("❌ Cannot connect to WHOOSH API. Make sure WHOOSH is running.")
         sys.exit(1)

     # Load configuration
     config = load_config()
-    agents = config.get('hive', {}).get('agents', {})
+    agents = config.get('whoosh', {}).get('agents', {})

     if not agents:
         print("❌ No agents found in configuration")
@@ -136,7 +136,7 @@ def main():

     if successful_registrations > 0:
         print(f"\n🎉 Successfully registered {successful_registrations} agents!")
-        print("🔗 Check agent status: curl https://hive.home.deepblack.cloud/api/agents")
+        print("🔗 Check agent status: curl https://whoosh.home.deepblack.cloud/api/agents")
     else:
         print("\n💔 No agents were successfully registered.")
         sys.exit(1)
@@ -1,23 +1,23 @@
 #!/bin/bash

-# 🐝 Hive Claude Integration Setup Script
+# 🐝 WHOOSH Claude Integration Setup Script
 # Sets up MCP server configuration for Claude Desktop

 set -e

-echo "🐝 Setting up Hive MCP Server for Claude Integration..."
+echo "🐝 Setting up WHOOSH MCP Server for Claude Integration..."

-# Get the absolute path to the Hive project
-HIVE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
-MCP_SERVER_PATH="$HIVE_DIR/mcp-server/dist/index.js"
+# Get the absolute path to the WHOOSH project
+WHOOSH_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
+MCP_SERVER_PATH="$WHOOSH_DIR/mcp-server/dist/index.js"

-echo "📁 Hive directory: $HIVE_DIR"
+echo "📁 WHOOSH directory: $WHOOSH_DIR"
 echo "🔧 MCP server path: $MCP_SERVER_PATH"

 # Check if MCP server is built
 if [ ! -f "$MCP_SERVER_PATH" ]; then
     echo "❌ MCP server not found. Building..."
-    cd "$HIVE_DIR/mcp-server"
+    cd "$WHOOSH_DIR/mcp-server"
     npm install
     npm run build
     echo "✅ MCP server built successfully"
@@ -61,12 +61,12 @@ echo "📝 Creating Claude Desktop configuration..."
 cat > "$CLAUDE_CONFIG_FILE" << EOF
 {
   "mcpServers": {
-    "hive": {
+    "whoosh": {
       "command": "node",
       "args": ["$MCP_SERVER_PATH"],
       "env": {
-        "HIVE_API_URL": "http://localhost:8087",
-        "HIVE_WS_URL": "ws://localhost:8087"
+        "WHOOSH_API_URL": "http://localhost:8087",
+        "WHOOSH_WS_URL": "ws://localhost:8087"
       }
     }
   }
}
@@ -76,22 +76,22 @@ EOF
 echo "✅ Claude Desktop configuration created!"
 echo ""
 echo "🎯 Next Steps:"
-echo "1. Ensure your Hive cluster is running:"
-echo "   cd $HIVE_DIR && docker compose ps"
+echo "1. Ensure your WHOOSH cluster is running:"
+echo "   cd $WHOOSH_DIR && docker compose ps"
 echo ""
 echo "2. Restart Claude Desktop to load the MCP server"
 echo ""
 echo "3. In Claude, you can now use commands like:"
-echo "   • 'Show me my Hive cluster status'"
+echo "   • 'Show me my WHOOSH cluster status'"
 echo "   • 'Register a new agent at http://walnut.local:11434'"
 echo "   • 'Create a kernel development task for FlashAttention optimization'"
 echo "   • 'Coordinate development across my distributed team'"
 echo ""
-echo "🐝 Hive MCP integration is ready!"
+echo "🐝 WHOOSH MCP integration is ready!"
 echo ""
 echo "📋 Configuration Details:"
 echo "   • MCP Server: $MCP_SERVER_PATH"
-echo "   • Hive API: http://localhost:8087"
+echo "   • WHOOSH API: http://localhost:8087"
 echo "   • Claude Config: $CLAUDE_CONFIG_FILE"
 echo ""
 echo "🔧 To modify the configuration later, edit: $CLAUDE_CONFIG_FILE"
@@ -1,13 +1,13 @@
 #!/bin/bash

-# Hive Startup Script
+# WHOOSH Startup Script
 # Unified Distributed AI Orchestration Platform

 set -e

 # Use relative path or environment variable
-HIVE_ROOT="${HIVE_ROOT:-$(dirname "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)")}"
-LOG_FILE="$HIVE_ROOT/logs/startup.log"
+WHOOSH_ROOT="${WHOOSH_ROOT:-$(dirname "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)")}"
+LOG_FILE="$WHOOSH_ROOT/logs/startup.log"

 # Colors for output
 RED='\033[0;31m'
@@ -19,7 +19,7 @@ CYAN='\033[0;36m'
 NC='\033[0m' # No Color

 # Create logs directory
-mkdir -p "$HIVE_ROOT/logs"
+mkdir -p "$WHOOSH_ROOT/logs"

 log() {
     echo -e "$1" | tee -a "$LOG_FILE"
@@ -45,7 +45,7 @@ log_error() {
 echo -e "${PURPLE}"
 cat << "EOF"
 🐝 =============================================== 🐝
-    HIVE - Distributed AI Orchestration Platform
+    WHOOSH - Distributed AI Orchestration Platform

     Consolidating the power of:
     • McPlan (n8n → MCP Bridge)
@@ -56,10 +56,10 @@ cat << "EOF"
 EOF
 echo -e "${NC}"

-# Change to Hive directory
-cd "$HIVE_ROOT"
+# Change to WHOOSH directory
+cd "$WHOOSH_ROOT"

-log_info "Starting Hive initialization..."
+log_info "Starting WHOOSH initialization..."
 log_info "Working directory: $(pwd)"
 log_info "Timestamp: $(date)"

@@ -80,8 +80,8 @@ fi
 log_success "docker compose is available"

 # Check if docker-compose.swarm.yml exists
-if [ ! -f "$HIVE_ROOT/docker-compose.swarm.yml" ]; then
-    log_error "docker-compose.swarm.yml not found in $HIVE_ROOT"
+if [ ! -f "$WHOOSH_ROOT/docker-compose.swarm.yml" ]; then
+    log_error "docker-compose.swarm.yml not found in $WHOOSH_ROOT"
     exit 1
 fi

@@ -94,21 +94,21 @@ else
     exit 1
 fi

-# Build Hive services
-log_info "Building Hive services..."
-if docker build -t registry.home.deepblack.cloud/tony/hive-backend:latest ./backend && docker build -t registry.home.deepblack.cloud/tony/hive-frontend:latest ./frontend; then
-    log_success "Hive services built successfully"
+# Build WHOOSH services
+log_info "Building WHOOSH services..."
+if docker build -t registry.home.deepblack.cloud/tony/whoosh-backend:latest ./backend && docker build -t registry.home.deepblack.cloud/tony/whoosh-frontend:latest ./frontend; then
+    log_success "WHOOSH services built successfully"
 else
-    log_error "Failed to build Hive services"
+    log_error "Failed to build WHOOSH services"
     exit 1
 fi

 # Deploy services using docker stack
-log_info "Deploying Hive services..."
-if docker stack deploy -c docker-compose.swarm.yml hive; then
-    log_success "Hive services deployed successfully"
+log_info "Deploying WHOOSH services..."
+if docker stack deploy -c docker-compose.swarm.yml whoosh; then
+    log_success "WHOOSH services deployed successfully"
 else
-    log_error "Failed to deploy Hive services"
+    log_error "Failed to deploy WHOOSH services"
     exit 1
 fi

@@ -131,14 +131,14 @@ wait_for_service() {
 }

 # Wait for backend API
-if wait_for_service "hive-backend" "http://localhost:8000/health"; then
+if wait_for_service "whoosh-backend" "http://localhost:8000/health"; then
     log_success "Backend API is ready"
 else
     log_warning "Backend API not responding after 60 seconds"
 fi

 # Wait for frontend
-if wait_for_service "hive-frontend" "http://localhost:3000"; then
+if wait_for_service "whoosh-frontend" "http://localhost:3000"; then
     log_success "Frontend is ready"
 else
     log_warning "Frontend not responding after 60 seconds"
@@ -147,7 +147,7 @@ fi
 # Check service health using docker stack
 log_info "Checking service health..."

-services=("hive_postgres" "hive_redis" "hive_hive-backend" "hive_hive-frontend" "hive_prometheus" "hive_grafana")
+services=("whoosh_postgres" "whoosh_redis" "whoosh_whoosh-backend" "whoosh_whoosh-frontend" "whoosh_prometheus" "whoosh_grafana")
 healthy_services=0

 for service in "${services[@]}"; do
@@ -167,12 +167,12 @@ fi

 # Display service URLs
 echo -e "\n${CYAN}🔗 Service URLs:${NC}"
-echo -e "${GREEN} • Hive Frontend:${NC} https://hive.home.deepblack.cloud"
-echo -e "${GREEN} • Hive API:${NC} https://hive-api.home.deepblack.cloud"
-echo -e "${GREEN} • API Documentation:${NC} https://hive-api.home.deepblack.cloud/docs"
-echo -e "${GREEN} • Grafana Dashboard:${NC} https://hive-grafana.home.deepblack.cloud (admin/hiveadmin)"
-echo -e "${GREEN} • Prometheus:${NC} https://hive-prometheus.home.deepblack.cloud"
-echo -e "${GREEN} • PostgreSQL:${NC} localhost:5432 (hive/hivepass)"
+echo -e "${GREEN} • WHOOSH Frontend:${NC} https://whoosh.home.deepblack.cloud"
+echo -e "${GREEN} • WHOOSH API:${NC} https://whoosh-api.home.deepblack.cloud"
+echo -e "${GREEN} • API Documentation:${NC} https://whoosh-api.home.deepblack.cloud/docs"
+echo -e "${GREEN} • Grafana Dashboard:${NC} https://whoosh-grafana.home.deepblack.cloud (admin/whooshadmin)"
+echo -e "${GREEN} • Prometheus:${NC} https://whoosh-prometheus.home.deepblack.cloud"
+echo -e "${GREEN} • PostgreSQL:${NC} localhost:5432 (whoosh/whooshpass)"
 echo -e "${GREEN} • Redis:${NC} localhost:6379"

 # Display agent status
@@ -186,18 +186,18 @@ echo -e "${GREEN} • TULLY:${NC} http://Tullys-MacBook-Air.local:11434 (Mo

 # Display next steps
 echo -e "\n${PURPLE}📋 Next Steps:${NC}"
-echo -e "${YELLOW} 1.${NC} Open Hive Dashboard: ${BLUE}https://hive.home.deepblack.cloud${NC}"
+echo -e "${YELLOW} 1.${NC} Open WHOOSH Dashboard: ${BLUE}https://whoosh.home.deepblack.cloud${NC}"
 echo -e "${YELLOW} 2.${NC} Check agent connectivity in the dashboard"
 echo -e "${YELLOW} 3.${NC} Import or create your first workflow"
 echo -e "${YELLOW} 4.${NC} Monitor execution in real-time"
-echo -e "${YELLOW} 5.${NC} View metrics in Grafana: ${BLUE}https://hive-grafana.home.deepblack.cloud${NC}"
+echo -e "${YELLOW} 5.${NC} View metrics in Grafana: ${BLUE}https://whoosh-grafana.home.deepblack.cloud${NC}"

 # Display management commands
 echo -e "\n${PURPLE}🛠️ Management Commands:${NC}"
-echo -e "${YELLOW} • View logs:${NC} docker service logs hive_hive-backend"
-echo -e "${YELLOW} • Stop services:${NC} docker stack rm hive"
-echo -e "${YELLOW} • Restart:${NC} docker stack rm hive && docker stack deploy -c docker-compose.swarm.yml hive"
-echo -e "${YELLOW} • Shell access:${NC} docker exec -it \$(docker ps -q -f name=hive_hive-backend) bash"
+echo -e "${YELLOW} • View logs:${NC} docker service logs whoosh_whoosh-backend"
+echo -e "${YELLOW} • Stop services:${NC} docker stack rm whoosh"
+echo -e "${YELLOW} • Restart:${NC} docker stack rm whoosh && docker stack deploy -c docker-compose.swarm.yml whoosh"
+echo -e "${YELLOW} • Shell access:${NC} docker exec -it \$(docker ps -q -f name=whoosh_whoosh-backend) bash"

 # Check agent connectivity
 echo -e "\n${CYAN}🔍 Testing Agent Connectivity:${NC}"
@@ -220,7 +220,7 @@ for agent_info in "${agents[@]}"; do
     fi
 done

-echo -e "\n${GREEN}🎉 Hive startup complete!${NC}"
+echo -e "\n${GREEN}🎉 WHOOSH startup complete!${NC}"
 echo -e "${CYAN}🐝 Welcome to the distributed AI future!${NC}"

-log_info "Hive startup completed at $(date)"
+log_info "WHOOSH startup completed at $(date)"
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 """
-Test script for Hive backend CLI agent integration
+Test script for WHOOSH backend CLI agent integration
 """

 import asyncio
@@ -12,7 +12,7 @@ import logging
 backend_path = os.path.join(os.path.dirname(__file__), '../../backend')
 sys.path.insert(0, backend_path)

-from app.core.hive_coordinator import HiveCoordinator, Agent, AgentType
+from app.core.whoosh_coordinator import WHOOSHCoordinator, Agent, AgentType
 from app.cli_agents.cli_agent_manager import get_cli_agent_manager

 # Configure logging
@@ -50,13 +50,13 @@ async def test_cli_agent_manager():
     return False


-async def test_hive_coordinator_integration():
-    """Test Hive coordinator with CLI agents"""
-    print("\n🤖 Testing Hive Coordinator Integration...")
+async def test_whoosh_coordinator_integration():
+    """Test WHOOSH coordinator with CLI agents"""
+    print("\n🤖 Testing WHOOSH Coordinator Integration...")

     try:
         # Initialize coordinator
-        coordinator = HiveCoordinator()
+        coordinator = WHOOSHCoordinator()
         await coordinator.initialize()

         # Test CLI agent registration
@@ -118,7 +118,7 @@ async def test_whoosh_coordinator_integration():
         return True

     except Exception as e:
-        print(f"❌ Hive Coordinator integration test failed: {e}")
+        print(f"❌ WHOOSH Coordinator integration test failed: {e}")
         return False


@@ -127,7 +127,7 @@ async def test_mixed_agent_types():
     print("\n⚡ Testing Mixed Agent Types...")

     try:
-        coordinator = HiveCoordinator()
+        coordinator = WHOOSHCoordinator()
         await coordinator.initialize()

         # Add both Ollama and CLI agents (simulated)
@@ -185,7 +185,7 @@ async def main():

     tests = [
         ("CLI Agent Manager", test_cli_agent_manager),
-        ("Hive Coordinator Integration", test_hive_coordinator_integration),
+        ("WHOOSH Coordinator Integration", test_whoosh_coordinator_integration),
         ("Mixed Agent Types", test_mixed_agent_types)
     ]
@@ -4,37 +4,37 @@
  * Test MCP Server CLI Agent Integration
  */

-const { HiveClient } = require('../../mcp-server/dist/hive-client.js');
-const { HiveTools } = require('../../mcp-server/dist/hive-tools.js');
+const { WHOOSHClient } = require('../../mcp-server/dist/whoosh-client.js');
+const { WHOOSHTools } = require('../../mcp-server/dist/whoosh-tools.js');

 async function testMCPIntegration() {
     console.log('🧪 Testing MCP Server CLI Agent Integration...\n');

     try {
-        // Initialize Hive client
-        const hiveClient = new HiveClient({
-            baseUrl: 'https://hive.home.deepblack.cloud/api',
-            wsUrl: 'wss://hive.home.deepblack.cloud/socket.io',
+        // Initialize WHOOSH client
+        const whooshClient = new WHOOSHClient({
+            baseUrl: 'https://whoosh.home.deepblack.cloud/api',
+            wsUrl: 'wss://whoosh.home.deepblack.cloud/socket.io',
             timeout: 15000
         });

-        console.log('✅ HiveClient initialized');
+        console.log('✅ WHOOSHClient initialized');

         // Test connection
         try {
-            await hiveClient.testConnection();
-            console.log('✅ Connection to Hive backend successful');
+            await whooshClient.testConnection();
+            console.log('✅ Connection to WHOOSH backend successful');
         } catch (error) {
             console.log('⚠️ Connection test failed (backend may be offline):', error.message);
             console.log('   Continuing with tool definition tests...\n');
         }

         // Initialize tools
-        const hiveTools = new HiveTools(hiveClient);
-        console.log('✅ HiveTools initialized');
+        const whooshTools = new WHOOSHTools(whooshClient);
+        console.log('✅ WHOOSHTools initialized');

         // Test tool definitions
-        const tools = hiveTools.getAllTools();
+        const tools = whooshTools.getAllTools();
         console.log(`✅ Loaded ${tools.length} MCP tools\n`);

         // Check for CLI agent tools
@@ -49,9 +49,9 @@ async function testMCPIntegration() {
         });

         // Test tool schema validation
-        const registerCliTool = tools.find(t => t.name === 'hive_register_cli_agent');
+        const registerCliTool = tools.find(t => t.name === 'whoosh_register_cli_agent');
         if (registerCliTool) {
-            console.log('\n✅ hive_register_cli_agent tool found');
+            console.log('\n✅ whoosh_register_cli_agent tool found');
             console.log('   Required fields:', registerCliTool.inputSchema.required);

             const properties = registerCliTool.inputSchema.properties;
@@ -61,7 +61,7 @@ async function testMCPIntegration() {
                 console.log('❌ CLI agent tool schema missing required properties');
             }
         } else {
-            console.log('❌ hive_register_cli_agent tool not found');
+            console.log('❌ whoosh_register_cli_agent tool not found');
         }

         // Test agent enumeration
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 """
-Comprehensive Testing Suite for Hive Distributed Workflows
+Comprehensive Testing Suite for WHOOSH Distributed Workflows
 Tests all aspects of the distributed development workflow system
 """

@@ -551,7 +551,7 @@ class DistributedWorkflowTester:
     def generate_detailed_report(self) -> str:
         """Generate a detailed test report"""
         report = []
-        report.append("# Hive Distributed Workflow System - Test Report")
+        report.append("# WHOOSH Distributed Workflow System - Test Report")
         report.append(f"Generated: {datetime.now().isoformat()}")
         report.append("")

@@ -598,11 +598,11 @@ class DistributedWorkflowTester:

 async def main():
     """Main test execution function"""
-    parser = argparse.ArgumentParser(description="Test Hive Distributed Workflow System")
+    parser = argparse.ArgumentParser(description="Test WHOOSH Distributed Workflow System")
     parser.add_argument(
         "--url",
         default="http://localhost:8000",
-        help="Base URL for the Hive API (default: http://localhost:8000)"
+        help="Base URL for the WHOOSH API (default: http://localhost:8000)"
     )
     parser.add_argument(
         "--output",
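Given the argparse setup above, the suite can be pointed at any running deployment by URL; a usage sketch (the script filename is an assumption):

    python3 test_distributed_workflows.py --url http://localhost:8000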