Major WHOOSH system refactoring and feature enhancements
- Migrated from HIVE branding to WHOOSH across all components
- Enhanced backend API with new services: AI models, BZZZ integration, templates, members
- Added comprehensive testing suite with security, performance, and integration tests
- Improved frontend with new components for project setup, AI models, and team management
- Updated MCP server implementation with WHOOSH-specific tools and resources
- Enhanced deployment configurations with production-ready Docker setups
- Added comprehensive documentation and setup guides
- Implemented age encryption service and UCXL integration

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
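The diff below carries the core of the rename: the consolidation script's class, paths, and configuration keys move from Hive to WHOOSH. As a minimal usage sketch of the renamed entry point (the script's filename is not visible in this commit, so the module name here is hypothetical; the class and method names come from the diff):

```python
# Minimal usage sketch, assuming the script below is importable as
# migrate_to_whoosh (hypothetical module name; not shown in this commit).
import asyncio

from migrate_to_whoosh import WHOOSHMigrator  # hypothetical import path

async def run_migration() -> None:
    migrator = WHOOSHMigrator()
    await migrator.migrate_all()  # phases: structure setup, validation, config migration
    print(f"Finished with {len(migrator.errors)} recorded errors")

if __name__ == "__main__":
    asyncio.run(run_migration())
```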
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 """
-Migration script to consolidate existing distributed AI projects into Hive
+Migration script to consolidate existing distributed AI projects into WHOOSH
 """

 import os
@@ -18,13 +18,13 @@ from datetime import datetime
 sys.path.append('/home/tony/AI/projects/distributed-ai-dev')
 sys.path.append('/home/tony/AI/projects/McPlan/mcplan-web/backend')

-class HiveMigrator:
+class WHOOSHMigrator:
     """
     Migrates and consolidates data from existing distributed AI projects
     """

     def __init__(self):
-        self.hive_root = Path("/home/tony/AI/projects/hive")
+        self.whoosh_root = Path("/home/tony/AI/projects/whoosh")
         self.projects = {
             'distributed-ai-dev': Path("/home/tony/AI/projects/distributed-ai-dev"),
             'mcplan': Path("/home/tony/AI/projects/McPlan"),
@@ -50,11 +50,11 @@ class HiveMigrator:

     async def migrate_all(self):
         """Execute complete migration process"""
-        self.log("🚀 Starting Hive migration from existing projects")
+        self.log("🚀 Starting WHOOSH migration from existing projects")

         try:
             # Phase 1: Setup and validation
-            await self.setup_hive_structure()
+            await self.setup_whoosh_structure()
             await self.validate_source_projects()

             # Phase 2: Configuration migration
@@ -78,9 +78,9 @@ class HiveMigrator:
             self.error(f"Migration failed: {str(e)}")
             raise

-    async def setup_hive_structure(self):
-        """Create Hive project directory structure"""
-        self.log("📁 Setting up Hive project structure")
+    async def setup_whoosh_structure(self):
+        """Create WHOOSH project directory structure"""
+        self.log("📁 Setting up WHOOSH project structure")

         directories = [
             "backend/app/core",
@@ -114,7 +114,7 @@ class HiveMigrator:
         ]

         for directory in directories:
-            (self.hive_root / directory).mkdir(parents=True, exist_ok=True)
+            (self.whoosh_root / directory).mkdir(parents=True, exist_ok=True)

         self.log(f"Created {len(directories)} directories")

@@ -141,9 +141,9 @@ class HiveMigrator:
         with open(source_config, 'r') as f:
             agents_config = yaml.safe_load(f)

-        # Create enhanced Hive configuration
-        hive_config = {
-            'hive': {
+        # Create enhanced WHOOSH configuration
+        whoosh_config = {
+            'whoosh': {
                 'cluster': {
                     'name': 'Development Cluster',
                     'region': 'home.deepblack.cloud'
@@ -200,11 +200,11 @@ class HiveMigrator:
                     'max_response_time': 30
                 })
             }
-            hive_config['hive']['agents'][agent_id] = enhanced_config
+            whoosh_config['whoosh']['agents'][agent_id] = enhanced_config

         # Add default agents if none exist
-        if not hive_config['hive']['agents']:
-            hive_config['hive']['agents'] = {
+        if not whoosh_config['whoosh']['agents']:
+            whoosh_config['whoosh']['agents'] = {
                 'acacia': {
                     'name': 'ACACIA Infrastructure Specialist',
                     'endpoint': 'http://192.168.1.72:11434',
@@ -256,11 +256,11 @@ class HiveMigrator:
         }

         # Save unified configuration
-        config_path = self.hive_root / 'config' / 'hive.yaml'
+        config_path = self.whoosh_root / 'config' / 'whoosh.yaml'
         with open(config_path, 'w') as f:
-            yaml.dump(hive_config, f, default_flow_style=False, sort_keys=False)
+            yaml.dump(whoosh_config, f, default_flow_style=False, sort_keys=False)

-        self.log(f"✅ Migrated {len(hive_config['hive']['agents'])} agent configurations")
+        self.log(f"✅ Migrated {len(whoosh_config['whoosh']['agents'])} agent configurations")

     async def migrate_monitoring_configs(self):
         """Migrate monitoring configurations from cluster project"""
@@ -272,15 +272,15 @@ class HiveMigrator:
                 'scrape_interval': '30s',
                 'evaluation_interval': '30s'
             },
-            'rule_files': ['hive_alerts.yml'],
+            'rule_files': ['whoosh_alerts.yml'],
             'scrape_configs': [
                 {
-                    'job_name': 'hive-backend',
-                    'static_configs': [{'targets': ['hive-coordinator:8000']}],
+                    'job_name': 'whoosh-backend',
+                    'static_configs': [{'targets': ['whoosh-coordinator:8000']}],
                     'metrics_path': '/api/metrics'
                 },
                 {
-                    'job_name': 'hive-agents',
+                    'job_name': 'whoosh-agents',
                     'static_configs': [
                         {'targets': ['192.168.1.72:11434']},
                         {'targets': ['192.168.1.27:11434']},
@@ -290,15 +290,15 @@ class HiveMigrator:
             ]
         }

-        prometheus_path = self.hive_root / 'config' / 'monitoring' / 'prometheus.yml'
+        prometheus_path = self.whoosh_root / 'config' / 'monitoring' / 'prometheus.yml'
         with open(prometheus_path, 'w') as f:
             yaml.dump(prometheus_config, f)

         # Create Grafana dashboard configurations
         grafana_config = {
             'dashboards': {
-                'hive_overview': {
-                    'title': 'Hive Cluster Overview',
+                'whoosh_overview': {
+                    'title': 'WHOOSH Cluster Overview',
                     'panels': [
                         'Agent Status',
                         'Task Queue Length',
@@ -319,7 +319,7 @@ class HiveMigrator:
             }
         }

-        grafana_path = self.hive_root / 'config' / 'monitoring' / 'grafana.yml'
+        grafana_path = self.whoosh_root / 'config' / 'monitoring' / 'grafana.yml'
         with open(grafana_path, 'w') as f:
             yaml.dump(grafana_config, f)

@@ -332,7 +332,7 @@ class HiveMigrator:
         # Map of source files to destination files
         component_mapping = {
             # From distributed-ai-dev
-            'distributed-ai-dev/src/core/ai_dev_coordinator.py': 'backend/app/core/hive_coordinator.py',
+            'distributed-ai-dev/src/core/ai_dev_coordinator.py': 'backend/app/core/whoosh_coordinator.py',
             'distributed-ai-dev/src/monitoring/performance_monitor.py': 'backend/app/core/performance_monitor.py',
             'distributed-ai-dev/src/config/agent_manager.py': 'backend/app/core/agent_manager.py',

@@ -357,7 +357,7 @@ class HiveMigrator:
                 break

         if source_path:
-            dest_path = self.hive_root / dest_rel
+            dest_path = self.whoosh_root / dest_rel

             if source_path.is_file():
                 dest_path.parent.mkdir(parents=True, exist_ok=True)
@@ -377,7 +377,7 @@ class HiveMigrator:
         self.log("🗄️ Creating unified database schema")

         schema_sql = """
--- Hive Unified Database Schema
+-- WHOOSH Unified Database Schema

 -- User Management
 CREATE TABLE users (
@@ -497,11 +497,11 @@ CREATE INDEX idx_alerts_unresolved ON alerts(resolved, created_at) WHERE resolve

 -- Sample data
 INSERT INTO users (email, hashed_password, role) VALUES
-('admin@hive.local', '$2b$12$LQv3c1yqBWVHxkd0LHAkCOYz6TtxMQJqhN8/lewohT6ZErjH.2T.2', 'admin'),
-('developer@hive.local', '$2b$12$LQv3c1yqBWVHxkd0LHAkCOYz6TtxMQJqhN8/lewohT6ZErjH.2T.2', 'developer');
+('admin@whoosh.local', '$2b$12$LQv3c1yqBWVHxkd0LHAkCOYz6TtxMQJqhN8/lewohT6ZErjH.2T.2', 'admin'),
+('developer@whoosh.local', '$2b$12$LQv3c1yqBWVHxkd0LHAkCOYz6TtxMQJqhN8/lewohT6ZErjH.2T.2', 'developer');
 """

-        schema_path = self.hive_root / 'backend' / 'migrations' / '001_initial_schema.sql'
+        schema_path = self.whoosh_root / 'backend' / 'migrations' / '001_initial_schema.sql'
         with open(schema_path, 'w') as f:
             f.write(schema_sql)

@@ -537,8 +537,8 @@ INSERT INTO users (email, hashed_password, role) VALUES
                 'migrated_at': datetime.now().isoformat()
             }

-            # Save to Hive workflows directory
-            dest_file = self.hive_root / 'config' / 'workflows' / f'{workflow_file.stem}.json'
+            # Save to WHOOSH workflows directory
+            dest_file = self.whoosh_root / 'config' / 'workflows' / f'{workflow_file.stem}.json'
             with open(dest_file, 'w') as f:
                 json.dump(migration_record, f, indent=2)

@@ -591,7 +591,7 @@ INSERT INTO users (email, hashed_password, role) VALUES
             conn.close()

             # Save migration data
-            migration_file = self.hive_root / 'scripts' / 'mcplan_data_export.json'
+            migration_file = self.whoosh_root / 'scripts' / 'mcplan_data_export.json'
             with open(migration_file, 'w') as f:
                 json.dump(migration_data, f, indent=2, default=str)

@@ -609,11 +609,11 @@ INSERT INTO users (email, hashed_password, role) VALUES
             'migration_summary': {
                 'timestamp': datetime.now().isoformat(),
                 'source_projects': list(self.projects.keys()),
-                'hive_version': '1.0.0',
+                'whoosh_version': '1.0.0',
                 'migration_status': 'completed' if not self.errors else 'completed_with_errors'
             },
             'components_migrated': {
-                'agent_configurations': 'config/hive.yaml',
+                'agent_configurations': 'config/whoosh.yaml',
                 'monitoring_configs': 'config/monitoring/',
                 'database_schema': 'backend/migrations/001_initial_schema.sql',
                 'core_components': 'backend/app/core/',
@@ -634,12 +634,12 @@ INSERT INTO users (email, hashed_password, role) VALUES
             'errors': self.errors
         }

-        report_path = self.hive_root / 'MIGRATION_REPORT.json'
+        report_path = self.whoosh_root / 'MIGRATION_REPORT.json'
         with open(report_path, 'w') as f:
             json.dump(report, f, indent=2)

         # Also create a markdown summary
-        md_report = f"""# Hive Migration Report
+        md_report = f"""# WHOOSH Migration Report

 ## Summary
 - **Migration Date**: {report['migration_summary']['timestamp']}
@@ -666,7 +666,7 @@ INSERT INTO users (email, hashed_password, role) VALUES
         for error in self.errors:
             md_report += f"- {error}\n"

-        md_report_path = self.hive_root / 'MIGRATION_REPORT.md'
+        md_report_path = self.whoosh_root / 'MIGRATION_REPORT.md'
         with open(md_report_path, 'w') as f:
             f.write(md_report)

@@ -676,20 +676,20 @@ INSERT INTO users (email, hashed_password, role) VALUES

 async def main():
     """Main migration function"""
-    migrator = HiveMigrator()
+    migrator = WHOOSHMigrator()

     try:
         await migrator.migrate_all()

         print("\n" + "="*60)
-        print("🎉 HIVE MIGRATION COMPLETED!")
+        print("🎉 WHOOSH MIGRATION COMPLETED!")
         print("="*60)
         print(f"✅ Migration successful with {len(migrator.errors)} errors")
-        print(f"📁 Hive project created at: {migrator.hive_root}")
+        print(f"📁 WHOOSH project created at: {migrator.whoosh_root}")
         print(f"📋 Check MIGRATION_REPORT.md for detailed results")
         print("\nNext steps:")
-        print("1. cd /home/tony/AI/projects/hive")
-        print("2. Review config/hive.yaml")
+        print("1. cd /home/tony/AI/projects/whoosh")
+        print("2. Review config/whoosh.yaml")
         print("3. docker-compose up -d")
         print("4. Visit http://localhost:3000")
         print("="*60)
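Given the breadth of identifier and path renames in this diff, a follow-up sweep for stale Hive references in the migrated tree is cheap insurance before running the "Next steps" above. A minimal sketch, assuming the whoosh_root path from the diff; the suffix filter and substring match are illustrative choices, not part of this commit:

```python
# Sketch: report any lingering "hive" references in the migrated WHOOSH tree.
from pathlib import Path

root = Path("/home/tony/AI/projects/whoosh")  # whoosh_root, as set in the diff above
for path in root.rglob("*"):
    if not (path.is_file() and path.suffix in {".py", ".yaml", ".yml", ".sql", ".json", ".md"}):
        continue
    text = path.read_text(errors="ignore")  # tolerate odd encodings
    for lineno, line in enumerate(text.splitlines(), start=1):
        # Crude substring match; will also flag unrelated words like "archive".
        if "hive" in line.lower():
            print(f"{path}:{lineno}: {line.strip()}")
```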