Initial commit: Complete Hive distributed AI orchestration platform

This comprehensive implementation includes:
- FastAPI backend with MCP server integration
- React/TypeScript frontend with Vite
- PostgreSQL database with Redis caching
- Grafana/Prometheus monitoring stack
- Docker Compose orchestration
- Full MCP protocol support for Claude Code integration

Features:
- Agent discovery and management across network
- Visual workflow editor and execution engine
- Real-time task coordination and monitoring
- Multi-model support with specialized agents
- Distributed development task allocation

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
anthonyrawlins
2025-07-07 21:44:31 +10:00
commit d7ad321176
2631 changed files with 870175 additions and 0 deletions

View File

@@ -0,0 +1,195 @@
/**
* Hive Client
*
* Handles communication with the Hive backend API
*/
import axios, { AxiosInstance } from 'axios';
import WebSocket from 'ws';
/** Connection settings for the Hive backend API. */
export interface HiveConfig {
  baseUrl: string; // HTTP base URL of the Hive REST API (e.g. http://localhost:8087)
  wsUrl: string; // WebSocket base URL used by HiveClient.connectWebSocket()
  timeout: number; // Request timeout in milliseconds applied to every REST call
}
/** A registered AI agent in the Hive cluster, as returned by /api/agents. */
export interface Agent {
  id: string; // Unique agent identifier
  endpoint: string; // Agent API endpoint URL
  model: string; // Model name served by the agent (e.g. codellama:34b)
  specialty: string; // Specialization area (e.g. kernel_dev, pytorch_dev)
  status: 'available' | 'busy' | 'offline'; // Current availability
  current_tasks: number; // Number of tasks the agent is working on right now
  max_concurrent: number; // Maximum tasks the agent accepts concurrently
}
/** A development task tracked by the Hive backend (/api/tasks). */
export interface Task {
  id: string; // Unique task identifier assigned by the backend
  type: string; // Task type; matches an agent specialty for routing
  priority: number; // Priority 1 (low) to 5 (high) — see hive_create_task schema
  context: Record<string, any>; // Free-form context (objective, files, constraints, ...)
  status: 'pending' | 'in_progress' | 'completed' | 'failed';
  assigned_agent?: string; // Agent id once the task has been dispatched
  result?: Record<string, any>; // Result payload, present after completion
  created_at: string; // Creation timestamp (string as returned by the API)
  completed_at?: string; // Completion timestamp, if finished
}
/** Aggregate cluster status returned by /api/status. */
export interface ClusterStatus {
  // Backend process health and identity.
  system: {
    status: string; // e.g. "healthy"
    uptime: number; // Uptime in seconds (formatted as h/m by callers)
    version: string;
  };
  // Agent availability counts.
  agents: {
    total: number;
    available: number;
    busy: number;
  };
  // Task counts by lifecycle state.
  tasks: {
    total: number;
    pending: number;
    running: number;
    completed: number;
    failed: number;
  };
}
/**
 * Client for the Hive backend REST and WebSocket APIs.
 *
 * All methods return the parsed response payload. Network and HTTP errors
 * propagate as axios errors unless noted otherwise.
 */
export class HiveClient {
  private api: AxiosInstance;
  private config: HiveConfig;
  private wsConnection?: WebSocket;

  constructor(config?: Partial<HiveConfig>) {
    // Environment variables override the localhost defaults; an explicit
    // config argument overrides both.
    this.config = {
      baseUrl: process.env.HIVE_API_URL || 'http://localhost:8087',
      wsUrl: process.env.HIVE_WS_URL || 'ws://localhost:8087',
      timeout: 30000,
      ...config,
    };
    this.api = axios.create({
      baseURL: this.config.baseUrl,
      timeout: this.config.timeout,
      headers: {
        'Content-Type': 'application/json',
      },
    });
  }

  /**
   * Check that the backend is reachable and reports itself healthy.
   * @returns true when GET /health reports status "healthy".
   * @throws Error when the backend cannot be reached at all.
   */
  async testConnection(): Promise<boolean> {
    try {
      const response = await this.api.get('/health');
      return response.data.status === 'healthy';
    } catch (error) {
      // Unwrap Error instances so the message is not double-prefixed.
      throw new Error(
        `Failed to connect to Hive: ${error instanceof Error ? error.message : String(error)}`,
      );
    }
  }

  // --- Agent Management ---

  /** List all registered agents; resolves to an empty array when none exist. */
  async getAgents(): Promise<Agent[]> {
    const response = await this.api.get('/api/agents');
    return response.data.agents || [];
  }

  /** Register a new agent and return the backend-assigned agent id. */
  async registerAgent(agentData: Partial<Agent>): Promise<{ agent_id: string }> {
    const response = await this.api.post('/api/agents', agentData);
    return response.data;
  }

  // --- Task Management ---

  /** Create a task; the backend queues it for a matching agent. */
  async createTask(taskData: {
    type: string;
    priority: number;
    context: Record<string, any>;
  }): Promise<Task> {
    const response = await this.api.post('/api/tasks', taskData);
    return response.data;
  }

  /** Fetch a single task by id. */
  async getTask(taskId: string): Promise<Task> {
    // Encode the id so values containing reserved URL characters
    // (slashes, spaces, ...) cannot break or redirect the request path.
    const response = await this.api.get(`/api/tasks/${encodeURIComponent(taskId)}`);
    return response.data;
  }

  /** List tasks, optionally filtered by status, agent, and result count. */
  async getTasks(filters?: {
    status?: string;
    agent?: string;
    limit?: number;
  }): Promise<Task[]> {
    const params = new URLSearchParams();
    if (filters?.status) params.append('status', filters.status);
    if (filters?.agent) params.append('agent', filters.agent);
    if (filters?.limit) params.append('limit', filters.limit.toString());
    const response = await this.api.get(`/api/tasks?${params}`);
    return response.data.tasks || [];
  }

  // --- Workflow Management ---

  /** List all configured workflows; empty array when none exist. */
  async getWorkflows(): Promise<any[]> {
    const response = await this.api.get('/api/workflows');
    return response.data.workflows || [];
  }

  /** Create a workflow and return its backend-assigned id. */
  async createWorkflow(workflowData: Record<string, any>): Promise<{ workflow_id: string }> {
    const response = await this.api.post('/api/workflows', workflowData);
    return response.data;
  }

  /** Start a workflow execution; returns the execution id for monitoring. */
  async executeWorkflow(workflowId: string, inputs?: Record<string, any>): Promise<{ execution_id: string }> {
    const response = await this.api.post(
      `/api/workflows/${encodeURIComponent(workflowId)}/execute`,
      { inputs },
    );
    return response.data;
  }

  // --- Monitoring and Status ---

  /** Fetch aggregate cluster status (system, agents, tasks). */
  async getClusterStatus(): Promise<ClusterStatus> {
    const response = await this.api.get('/api/status');
    return response.data;
  }

  /** Fetch raw Prometheus metrics as plain text. */
  async getMetrics(): Promise<string> {
    const response = await this.api.get('/api/metrics');
    return response.data;
  }

  /** List workflow executions, optionally filtered to one workflow. */
  async getExecutions(workflowId?: string): Promise<any[]> {
    // Build the query with URLSearchParams (consistent with getTasks) so the
    // workflow id is percent-encoded instead of interpolated raw.
    const params = new URLSearchParams();
    if (workflowId) params.append('workflow_id', workflowId);
    const query = params.toString();
    const response = await this.api.get(query ? `/api/executions?${query}` : '/api/executions');
    return response.data.executions || [];
  }

  // --- Real-time Updates via WebSocket ---

  /**
   * Open a WebSocket subscription to a topic and resolve with the socket
   * once connected. Incoming messages are parsed and logged.
   */
  async connectWebSocket(topic: string = 'general'): Promise<WebSocket> {
    return new Promise((resolve, reject) => {
      const ws = new WebSocket(`${this.config.wsUrl}/ws/${encodeURIComponent(topic)}`);
      // 'error' can fire after 'open'; never settle the promise twice.
      let settled = false;
      ws.on('open', () => {
        console.log(`🔗 Connected to Hive WebSocket (${topic})`);
        this.wsConnection = ws;
        settled = true;
        resolve(ws);
      });
      ws.on('error', (error) => {
        console.error('WebSocket error:', error);
        if (!settled) {
          settled = true;
          reject(error);
        }
      });
      ws.on('message', (data) => {
        try {
          const message = JSON.parse(data.toString());
          console.log('📨 Hive update:', message);
        } catch (error) {
          console.error('Failed to parse WebSocket message:', error);
        }
      });
    });
  }

  /** Close and forget the WebSocket connection, if one is open. */
  async disconnect(): Promise<void> {
    if (this.wsConnection) {
      this.wsConnection.close();
      this.wsConnection = undefined;
    }
  }
}

View File

@@ -0,0 +1,415 @@
/**
* Hive Resources
*
* Defines MCP resources that expose Hive cluster state and real-time data
*/
import { Resource } from '@modelcontextprotocol/sdk/types.js';
import { HiveClient } from './hive-client.js';
/**
 * Defines the MCP resources that expose Hive cluster state (agents, tasks,
 * workflows, executions, metrics, capabilities) and serves their contents
 * by querying the backend through a HiveClient.
 */
export class HiveResources {
  private hiveClient: HiveClient;

  constructor(hiveClient: HiveClient) {
    this.hiveClient = hiveClient;
  }

  /** Static catalog of the hive:// resources this server exposes. */
  async getAllResources(): Promise<Resource[]> {
    return [
      {
        uri: 'hive://cluster/status',
        name: 'Cluster Status',
        description: 'Real-time status of the entire Hive cluster including agents and tasks',
        mimeType: 'application/json',
      },
      {
        uri: 'hive://agents/list',
        name: 'Agent Registry',
        description: 'List of all registered AI agents with their capabilities and current status',
        mimeType: 'application/json',
      },
      {
        uri: 'hive://tasks/active',
        name: 'Active Tasks',
        description: 'Currently running and pending tasks across the cluster',
        mimeType: 'application/json',
      },
      {
        uri: 'hive://tasks/completed',
        name: 'Completed Tasks',
        description: 'Recently completed tasks with results and performance metrics',
        mimeType: 'application/json',
      },
      {
        uri: 'hive://workflows/available',
        name: 'Available Workflows',
        description: 'All configured workflows ready for execution',
        mimeType: 'application/json',
      },
      {
        uri: 'hive://executions/recent',
        name: 'Recent Executions',
        description: 'Recent workflow executions with status and results',
        mimeType: 'application/json',
      },
      {
        uri: 'hive://metrics/prometheus',
        name: 'Cluster Metrics',
        description: 'Prometheus metrics for monitoring cluster performance',
        mimeType: 'text/plain',
      },
      {
        uri: 'hive://capabilities/overview',
        name: 'Cluster Capabilities',
        description: 'Overview of available agent types and their specializations',
        mimeType: 'application/json',
      },
    ];
  }

  /**
   * Dispatch a resource read by URI.
   *
   * Errors (including unknown URIs) are not thrown to the caller; they are
   * returned as a text content item describing the failure.
   */
  async readResource(uri: string): Promise<{ contents: Array<{ type: string; text?: string; data?: string; mimeType?: string }> }> {
    try {
      switch (uri) {
        case 'hive://cluster/status':
          return await this.getClusterStatusResource();
        case 'hive://agents/list':
          return await this.getAgentsResource();
        case 'hive://tasks/active':
          return await this.getActiveTasksResource();
        case 'hive://tasks/completed':
          return await this.getCompletedTasksResource();
        case 'hive://workflows/available':
          return await this.getWorkflowsResource();
        case 'hive://executions/recent':
          return await this.getExecutionsResource();
        case 'hive://metrics/prometheus':
          return await this.getMetricsResource();
        case 'hive://capabilities/overview':
          return await this.getCapabilitiesResource();
        default:
          throw new Error(`Resource not found: ${uri}`);
      }
    } catch (error) {
      return {
        contents: [
          {
            type: 'text',
            text: `Error reading resource ${uri}: ${error instanceof Error ? error.message : String(error)}`,
          },
        ],
      };
    }
  }

  // NOTE(review): the JSON resource methods below put their payload in a
  // 'data' field while getMetricsResource and the error path use 'text'.
  // Confirm MCP clients actually read 'data' here — MCP text contents
  // conventionally use 'text'.

  /** hive://cluster/status — raw ClusterStatus from the backend, pretty-printed. */
  private async getClusterStatusResource() {
    const status = await this.hiveClient.getClusterStatus();
    return {
      contents: [
        {
          type: 'text',
          data: JSON.stringify(status, null, 2),
          mimeType: 'application/json',
        },
      ],
    };
  }

  /** hive://agents/list — agent roster plus per-specialty grouping and availability counts. */
  private async getAgentsResource() {
    const agents = await this.hiveClient.getAgents();
    const agentData = {
      total_agents: agents.length,
      agents: agents.map(agent => ({
        id: agent.id,
        specialty: agent.specialty,
        model: agent.model,
        endpoint: agent.endpoint,
        status: agent.status,
        current_tasks: agent.current_tasks,
        max_concurrent: agent.max_concurrent,
        // Guard against division by zero for agents with max_concurrent 0.
        utilization: agent.max_concurrent > 0 ? (agent.current_tasks / agent.max_concurrent * 100).toFixed(1) + '%' : '0%',
      })),
      by_specialty: this.groupAgentsBySpecialty(agents),
      availability_summary: {
        available: agents.filter(a => a.status === 'available').length,
        busy: agents.filter(a => a.status === 'busy').length,
        offline: agents.filter(a => a.status === 'offline').length,
      },
    };
    return {
      contents: [
        {
          type: 'text',
          data: JSON.stringify(agentData, null, 2),
          mimeType: 'application/json',
        },
      ],
    };
  }

  /** hive://tasks/active — up to 50 pending plus 50 in-progress tasks with queue analysis. */
  private async getActiveTasksResource() {
    const pendingTasks = await this.hiveClient.getTasks({ status: 'pending', limit: 50 });
    const runningTasks = await this.hiveClient.getTasks({ status: 'in_progress', limit: 50 });
    const activeData = {
      summary: {
        pending: pendingTasks.length,
        running: runningTasks.length,
        total_active: pendingTasks.length + runningTasks.length,
      },
      // formatTaskForResource is passed unbound; safe because it never uses `this`.
      pending_tasks: pendingTasks.map(this.formatTaskForResource),
      running_tasks: runningTasks.map(this.formatTaskForResource),
      queue_analysis: this.analyzeTaskQueue(pendingTasks),
    };
    return {
      contents: [
        {
          type: 'text',
          data: JSON.stringify(activeData, null, 2),
          mimeType: 'application/json',
        },
      ],
    };
  }

  /** hive://tasks/completed — recent completed/failed tasks with success rate and durations. */
  private async getCompletedTasksResource() {
    const completedTasks = await this.hiveClient.getTasks({ status: 'completed', limit: 20 });
    const failedTasks = await this.hiveClient.getTasks({ status: 'failed', limit: 10 });
    const completedData = {
      summary: {
        completed: completedTasks.length,
        failed: failedTasks.length,
        // Success rate over the fetched window only (20 completed / 10 failed caps).
        success_rate: completedTasks.length + failedTasks.length > 0
          ? ((completedTasks.length / (completedTasks.length + failedTasks.length)) * 100).toFixed(1) + '%'
          : 'N/A',
      },
      recent_completed: completedTasks.map(this.formatTaskForResource),
      recent_failed: failedTasks.map(this.formatTaskForResource),
      performance_metrics: this.calculateTaskMetrics(completedTasks),
    };
    return {
      contents: [
        {
          type: 'text',
          data: JSON.stringify(completedData, null, 2),
          mimeType: 'application/json',
        },
      ],
    };
  }

  /** hive://workflows/available — workflow summaries with defaults for missing fields. */
  private async getWorkflowsResource() {
    const workflows = await this.hiveClient.getWorkflows();
    const workflowData = {
      total_workflows: workflows.length,
      workflows: workflows.map(wf => ({
        id: wf.id,
        name: wf.name || 'Unnamed Workflow',
        description: wf.description || 'No description',
        status: wf.status || 'unknown',
        created: wf.created_at || 'unknown',
        steps: wf.steps?.length || 0,
      })),
    };
    return {
      contents: [
        {
          type: 'text',
          data: JSON.stringify(workflowData, null, 2),
          mimeType: 'application/json',
        },
      ],
    };
  }

  /** hive://executions/recent — ten most recent executions plus a status histogram. */
  private async getExecutionsResource() {
    const executions = await this.hiveClient.getExecutions();
    const executionData = {
      total_executions: executions.length,
      recent_executions: executions.slice(0, 10).map(exec => ({
        id: exec.id,
        workflow_id: exec.workflow_id,
        status: exec.status,
        started_at: exec.started_at,
        completed_at: exec.completed_at,
        // Duration only computable when both timestamps are present.
        duration: exec.completed_at && exec.started_at
          ? this.calculateDuration(exec.started_at, exec.completed_at)
          : null,
      })),
      status_summary: this.summarizeExecutionStatuses(executions),
    };
    return {
      contents: [
        {
          type: 'text',
          data: JSON.stringify(executionData, null, 2),
          mimeType: 'application/json',
        },
      ],
    };
  }

  /** hive://metrics/prometheus — raw Prometheus text passed through unmodified. */
  private async getMetricsResource() {
    const metrics = await this.hiveClient.getMetrics();
    return {
      contents: [
        {
          type: 'text',
          text: metrics,
          mimeType: 'text/plain',
        },
      ],
    };
  }

  /**
   * hive://capabilities/overview — static specialization catalog with live
   * per-specialty agent counts and aggregate capacity numbers.
   */
  private async getCapabilitiesResource() {
    const agents = await this.hiveClient.getAgents();
    const capabilities = {
      agent_specializations: {
        kernel_dev: {
          description: 'GPU kernel development, HIP/CUDA optimization, memory coalescing',
          available_agents: agents.filter(a => a.specialty === 'kernel_dev').length,
          typical_models: ['codellama:34b', 'deepseek-coder:33b'],
        },
        pytorch_dev: {
          description: 'PyTorch backend development, autograd, TunableOp integration',
          available_agents: agents.filter(a => a.specialty === 'pytorch_dev').length,
          typical_models: ['deepseek-coder:33b', 'codellama:34b'],
        },
        profiler: {
          description: 'Performance analysis, GPU profiling, bottleneck identification',
          available_agents: agents.filter(a => a.specialty === 'profiler').length,
          typical_models: ['llama3:70b', 'mixtral:8x7b'],
        },
        docs_writer: {
          description: 'Technical documentation, API docs, tutorials, examples',
          available_agents: agents.filter(a => a.specialty === 'docs_writer').length,
          typical_models: ['llama3:70b', 'claude-3-haiku'],
        },
        tester: {
          description: 'Test creation, benchmarking, CI/CD, edge case handling',
          available_agents: agents.filter(a => a.specialty === 'tester').length,
          typical_models: ['codellama:34b', 'deepseek-coder:33b'],
        },
      },
      cluster_capacity: {
        total_agents: agents.length,
        total_concurrent_capacity: agents.reduce((sum, agent) => sum + agent.max_concurrent, 0),
        current_utilization: agents.reduce((sum, agent) => sum + agent.current_tasks, 0),
      },
      supported_frameworks: [
        'ROCm/HIP', 'PyTorch', 'CUDA', 'OpenMP', 'MPI', 'Composable Kernel'
      ],
      target_architectures: [
        'RDNA3', 'CDNA3', 'RDNA2', 'Vega', 'NVIDIA GPUs (via CUDA)'
      ],
    };
    return {
      contents: [
        {
          type: 'text',
          data: JSON.stringify(capabilities, null, 2),
          mimeType: 'application/json',
        },
      ],
    };
  }

  // --- Helper Methods ---

  /** Bucket agents into a specialty -> agents[] map. */
  private groupAgentsBySpecialty(agents: any[]) {
    const grouped: Record<string, any[]> = {};
    agents.forEach(agent => {
      if (!grouped[agent.specialty]) {
        grouped[agent.specialty] = [];
      }
      grouped[agent.specialty].push(agent);
    });
    return grouped;
  }

  /** Project a task to the compact shape used in resource payloads. */
  private formatTaskForResource(task: any) {
    return {
      id: task.id,
      type: task.type,
      priority: task.priority,
      status: task.status,
      assigned_agent: task.assigned_agent,
      created_at: task.created_at,
      completed_at: task.completed_at,
      objective: task.context?.objective || 'No objective specified',
    };
  }

  /** Histogram pending tasks by type and priority; includes mean priority. */
  private analyzeTaskQueue(tasks: any[]) {
    const byType = tasks.reduce((acc, task) => {
      acc[task.type] = (acc[task.type] || 0) + 1;
      return acc;
    }, {} as Record<string, number>);
    const byPriority = tasks.reduce((acc, task) => {
      const priority = `priority_${task.priority}`;
      acc[priority] = (acc[priority] || 0) + 1;
      return acc;
    }, {} as Record<string, number>);
    return {
      by_type: byType,
      by_priority: byPriority,
      // NOTE: string when tasks exist (toFixed), number 0 otherwise.
      average_priority: tasks.length > 0
        ? (tasks.reduce((sum, task) => sum + task.priority, 0) / tasks.length).toFixed(1)
        : 0,
    };
  }

  /**
   * Duration statistics (ms) over tasks that have both timestamps.
   * Returns null when there is nothing to analyze.
   */
  private calculateTaskMetrics(tasks: any[]) {
    if (tasks.length === 0) return null;
    const durations = tasks
      .filter(task => task.created_at && task.completed_at)
      .map(task => new Date(task.completed_at).getTime() - new Date(task.created_at).getTime());
    if (durations.length === 0) return null;
    return {
      average_duration_ms: Math.round(durations.reduce((a, b) => a + b, 0) / durations.length),
      min_duration_ms: Math.min(...durations),
      max_duration_ms: Math.max(...durations),
      total_tasks_analyzed: durations.length,
    };
  }

  /** Count executions by status string. */
  private summarizeExecutionStatuses(executions: any[]) {
    return executions.reduce((acc, exec) => {
      acc[exec.status] = (acc[exec.status] || 0) + 1;
      return acc;
    }, {} as Record<string, number>);
  }

  /** Human-readable "Xm Ys" difference between two timestamp strings. */
  private calculateDuration(start: string, end: string): string {
    const duration = new Date(end).getTime() - new Date(start).getTime();
    const minutes = Math.floor(duration / 60000);
    const seconds = Math.floor((duration % 60000) / 1000);
    return `${minutes}m ${seconds}s`;
  }
}

View File

@@ -0,0 +1,665 @@
/**
* Hive Tools
*
* Defines MCP tools that expose Hive operations to AI assistants
*/
import { Tool } from '@modelcontextprotocol/sdk/types.js';
import { HiveClient, Agent, Task } from './hive-client.js';
import { v4 as uuidv4 } from 'uuid';
import { spawn } from 'child_process';
import { promisify } from 'util';
import * as path from 'path';
/**
 * Defines the MCP tools exposed to AI assistants and dispatches tool calls
 * to the Hive backend via HiveClient. Tool results are returned as MCP
 * content arrays (markdown text); failures are reported with isError rather
 * than thrown.
 */
export class HiveTools {
  private hiveClient: HiveClient;

  constructor(hiveClient: HiveClient) {
    this.hiveClient = hiveClient;
  }

  /** Static catalog of every tool this server advertises, with JSON input schemas. */
  getAllTools(): Tool[] {
    return [
      // Agent Management Tools
      {
        name: 'hive_get_agents',
        description: 'Get all registered AI agents in the Hive cluster with their current status',
        inputSchema: {
          type: 'object',
          properties: {},
        },
      },
      {
        name: 'hive_register_agent',
        description: 'Register a new AI agent in the Hive cluster',
        inputSchema: {
          type: 'object',
          properties: {
            id: { type: 'string', description: 'Unique agent identifier' },
            endpoint: { type: 'string', description: 'Agent API endpoint URL' },
            model: { type: 'string', description: 'Model name (e.g., codellama:34b)' },
            specialty: {
              type: 'string',
              enum: ['kernel_dev', 'pytorch_dev', 'profiler', 'docs_writer', 'tester'],
              description: 'Agent specialization area'
            },
            max_concurrent: { type: 'number', description: 'Maximum concurrent tasks', default: 2 },
          },
          required: ['id', 'endpoint', 'model', 'specialty'],
        },
      },
      // Task Management Tools
      {
        name: 'hive_create_task',
        description: 'Create and assign a development task to the Hive cluster',
        inputSchema: {
          type: 'object',
          properties: {
            type: {
              type: 'string',
              enum: ['kernel_dev', 'pytorch_dev', 'profiler', 'docs_writer', 'tester'],
              description: 'Type of development task'
            },
            priority: {
              type: 'number',
              minimum: 1,
              maximum: 5,
              description: 'Task priority (1=low, 5=high)'
            },
            objective: { type: 'string', description: 'Main objective or goal of the task' },
            context: {
              type: 'object',
              description: 'Additional context, files, constraints, requirements',
              properties: {
                files: { type: 'array', items: { type: 'string' }, description: 'Related file paths' },
                constraints: { type: 'array', items: { type: 'string' }, description: 'Development constraints' },
                requirements: { type: 'array', items: { type: 'string' }, description: 'Specific requirements' },
                reference: { type: 'string', description: 'Reference documentation or links' }
              }
            },
          },
          required: ['type', 'priority', 'objective'],
        },
      },
      {
        name: 'hive_get_task',
        description: 'Get details and status of a specific task',
        inputSchema: {
          type: 'object',
          properties: {
            task_id: { type: 'string', description: 'Task identifier' },
          },
          required: ['task_id'],
        },
      },
      {
        name: 'hive_get_tasks',
        description: 'Get list of tasks with optional filtering',
        inputSchema: {
          type: 'object',
          properties: {
            status: {
              type: 'string',
              enum: ['pending', 'in_progress', 'completed', 'failed'],
              description: 'Filter by task status'
            },
            agent: { type: 'string', description: 'Filter by assigned agent ID' },
            limit: { type: 'number', description: 'Maximum number of tasks to return', default: 20 },
          },
        },
      },
      // Workflow Management Tools
      {
        name: 'hive_get_workflows',
        description: 'Get all available workflows in the Hive platform',
        inputSchema: {
          type: 'object',
          properties: {},
        },
      },
      {
        name: 'hive_create_workflow',
        description: 'Create a new workflow for distributed task orchestration',
        inputSchema: {
          type: 'object',
          properties: {
            name: { type: 'string', description: 'Workflow name' },
            description: { type: 'string', description: 'Workflow description' },
            steps: {
              type: 'array',
              description: 'Workflow steps in order',
              items: {
                type: 'object',
                properties: {
                  name: { type: 'string' },
                  type: { type: 'string' },
                  agent_type: { type: 'string' },
                  inputs: { type: 'object' },
                  outputs: { type: 'array', items: { type: 'string' } }
                }
              }
            },
          },
          required: ['name', 'steps'],
        },
      },
      {
        name: 'hive_execute_workflow',
        description: 'Execute a workflow with optional input parameters',
        inputSchema: {
          type: 'object',
          properties: {
            workflow_id: { type: 'string', description: 'Workflow identifier' },
            inputs: {
              type: 'object',
              description: 'Input parameters for workflow execution',
              additionalProperties: true
            },
          },
          required: ['workflow_id'],
        },
      },
      // Monitoring and Status Tools
      {
        name: 'hive_get_cluster_status',
        description: 'Get comprehensive status of the entire Hive cluster',
        inputSchema: {
          type: 'object',
          properties: {},
        },
      },
      {
        name: 'hive_get_metrics',
        description: 'Get Prometheus metrics from the Hive cluster',
        inputSchema: {
          type: 'object',
          properties: {},
        },
      },
      {
        name: 'hive_get_executions',
        description: 'Get workflow execution history and status',
        inputSchema: {
          type: 'object',
          properties: {
            workflow_id: { type: 'string', description: 'Filter by specific workflow ID' },
          },
        },
      },
      // Coordination Tools
      {
        name: 'hive_coordinate_development',
        description: 'Coordinate a complex development task across multiple specialized agents',
        inputSchema: {
          type: 'object',
          properties: {
            project_description: { type: 'string', description: 'Overall project or feature description' },
            breakdown: {
              type: 'array',
              description: 'Task breakdown by specialization',
              items: {
                type: 'object',
                properties: {
                  specialization: { type: 'string', enum: ['kernel_dev', 'pytorch_dev', 'profiler', 'docs_writer', 'tester'] },
                  task_description: { type: 'string' },
                  dependencies: { type: 'array', items: { type: 'string' } },
                  priority: { type: 'number', minimum: 1, maximum: 5 }
                }
              }
            },
            coordination_strategy: {
              type: 'string',
              enum: ['sequential', 'parallel', 'mixed'],
              description: 'How to coordinate the tasks',
              default: 'mixed'
            },
          },
          required: ['project_description', 'breakdown'],
        },
      },
      // Cluster Management Tools
      {
        name: 'hive_bring_online',
        description: 'Automatically discover and register all available Ollama agents on the network, bringing the entire Hive cluster online',
        inputSchema: {
          type: 'object',
          properties: {
            force_refresh: {
              type: 'boolean',
              description: 'Force refresh of all agents (re-register existing ones)',
              default: false
            },
            subnet_scan: {
              type: 'boolean',
              description: 'Perform full subnet scan for discovery',
              default: true
            },
          },
        },
      },
    ];
  }

  /**
   * Dispatch a named tool invocation to its implementation.
   *
   * Any thrown error is converted into an MCP error result (isError: true)
   * instead of propagating to the transport layer.
   */
  async executeTool(name: string, args: Record<string, any>): Promise<any> {
    try {
      switch (name) {
        // Agent Management
        case 'hive_get_agents':
          return await this.getAgents();
        case 'hive_register_agent':
          return await this.registerAgent(args);
        // Task Management
        case 'hive_create_task':
          return await this.createTask(args);
        case 'hive_get_task':
          return await this.getTask(args.task_id);
        case 'hive_get_tasks':
          return await this.getTasks(args);
        // Workflow Management
        case 'hive_get_workflows':
          return await this.getWorkflows();
        case 'hive_create_workflow':
          return await this.createWorkflow(args);
        case 'hive_execute_workflow':
          return await this.executeWorkflow(args.workflow_id, args.inputs);
        // Monitoring
        case 'hive_get_cluster_status':
          return await this.getClusterStatus();
        case 'hive_get_metrics':
          return await this.getMetrics();
        case 'hive_get_executions':
          return await this.getExecutions(args.workflow_id);
        // Coordination
        case 'hive_coordinate_development':
          return await this.coordinateDevelopment(args);
        // Cluster Management
        case 'hive_bring_online':
          return await this.bringHiveOnline(args);
        default:
          throw new Error(`Unknown tool: ${name}`);
      }
    } catch (error) {
      return {
        content: [
          {
            type: 'text',
            text: `Error executing ${name}: ${error instanceof Error ? error.message : String(error)}`,
          },
        ],
        isError: true,
      };
    }
  }

  // --- Tool Implementation Methods ---

  /** hive_get_agents — markdown roster of all registered agents. */
  private async getAgents() {
    const agents = await this.hiveClient.getAgents();
    return {
      content: [
        {
          type: 'text',
          text: `📋 Hive Cluster Agents (${agents.length} total):\n\n${agents.length > 0
            ? agents.map(agent =>
                `🤖 **${agent.id}** (${agent.specialty})\n` +
                `   • Model: ${agent.model}\n` +
                `   • Endpoint: ${agent.endpoint}\n` +
                `   • Status: ${agent.status}\n` +
                `   • Tasks: ${agent.current_tasks}/${agent.max_concurrent}\n`
              ).join('\n')
            : 'No agents registered yet. Use hive_register_agent to add agents to the cluster.'
          }`,
        },
      ],
    };
  }

  /** hive_register_agent — registers via the backend and echoes the supplied details. */
  private async registerAgent(args: any) {
    const result = await this.hiveClient.registerAgent(args);
    return {
      content: [
        {
          type: 'text',
          text: `✅ Successfully registered agent **${args.id}** in the Hive cluster!\n\n` +
                `🤖 Agent Details:\n` +
                `• ID: ${args.id}\n` +
                `• Specialization: ${args.specialty}\n` +
                `• Model: ${args.model}\n` +
                `• Endpoint: ${args.endpoint}\n` +
                `• Max Concurrent Tasks: ${args.max_concurrent || 2}`,
        },
      ],
    };
  }

  /** hive_create_task — folds the objective into the task context and queues it. */
  private async createTask(args: any) {
    const taskData = {
      type: args.type,
      priority: args.priority,
      context: {
        objective: args.objective,
        ...args.context,
      },
    };
    const task = await this.hiveClient.createTask(taskData);
    return {
      content: [
        {
          type: 'text',
          text: `🎯 Created development task **${task.id}**\n\n` +
                `📋 Task Details:\n` +
                `• Type: ${task.type}\n` +
                `• Priority: ${task.priority}/5\n` +
                `• Status: ${task.status}\n` +
                `• Objective: ${args.objective}\n` +
                `• Created: ${task.created_at}\n\n` +
                `The task has been queued and will be assigned to an available ${task.type} agent.`,
        },
      ],
    };
  }

  /** hive_get_task — fetch one task; appends result JSON when present. */
  private async getTask(taskId: string) {
    const task = await this.hiveClient.getTask(taskId);
    return {
      content: [
        {
          type: 'text',
          text: `🎯 Task **${task.id}** Details:\n\n` +
                `• Type: ${task.type}\n` +
                `• Priority: ${task.priority}/5\n` +
                `• Status: ${task.status}\n` +
                `• Assigned Agent: ${task.assigned_agent || 'Not assigned yet'}\n` +
                `• Created: ${task.created_at}\n` +
                `${task.completed_at ? `• Completed: ${task.completed_at}\n` : ''}` +
                `${task.result ? `\n📊 Result:\n${JSON.stringify(task.result, null, 2)}` : ''}`,
        },
      ],
    };
  }

  /** hive_get_tasks — filtered task listing as markdown. */
  private async getTasks(args: any) {
    const tasks = await this.hiveClient.getTasks(args);
    return {
      content: [
        {
          type: 'text',
          text: `📋 Hive Tasks (${tasks.length} found):\n\n${tasks.length > 0
            ? tasks.map(task =>
                `🎯 **${task.id}** (${task.type})\n` +
                `   • Status: ${task.status}\n` +
                `   • Priority: ${task.priority}/5\n` +
                `   • Agent: ${task.assigned_agent || 'Unassigned'}\n` +
                `   • Created: ${task.created_at}\n`
              ).join('\n')
            : 'No tasks found matching the criteria.'
          }`,
        },
      ],
    };
  }

  /** hive_get_workflows — workflow listing as markdown. */
  private async getWorkflows() {
    const workflows = await this.hiveClient.getWorkflows();
    return {
      content: [
        {
          type: 'text',
          text: `🔄 Hive Workflows (${workflows.length} total):\n\n${workflows.length > 0
            ? workflows.map(wf =>
                `🔄 **${wf.name || wf.id}**\n` +
                `   • ID: ${wf.id}\n` +
                `   • Description: ${wf.description || 'No description'}\n` +
                `   • Status: ${wf.status || 'Unknown'}\n`
              ).join('\n')
            : 'No workflows created yet. Use hive_create_workflow to create distributed workflows.'
          }`,
        },
      ],
    };
  }

  /** hive_create_workflow — creates a workflow and reports the new id. */
  private async createWorkflow(args: any) {
    const result = await this.hiveClient.createWorkflow(args);
    return {
      content: [
        {
          type: 'text',
          text: `✅ Created workflow **${args.name}**!\n\n` +
                `🔄 Workflow ID: ${result.workflow_id}\n` +
                `📋 Description: ${args.description || 'No description provided'}\n` +
                `🔧 Steps: ${args.steps.length} configured\n\n` +
                `The workflow is ready for execution using hive_execute_workflow.`,
        },
      ],
    };
  }

  /** hive_execute_workflow — starts an execution and reports its id. */
  private async executeWorkflow(workflowId: string, inputs?: any) {
    const result = await this.hiveClient.executeWorkflow(workflowId, inputs);
    return {
      content: [
        {
          type: 'text',
          text: `🚀 Started workflow execution!\n\n` +
                `🔄 Workflow ID: ${workflowId}\n` +
                `⚡ Execution ID: ${result.execution_id}\n` +
                `📥 Inputs: ${inputs ? JSON.stringify(inputs, null, 2) : 'None'}\n\n` +
                `Use hive_get_executions to monitor progress.`,
        },
      ],
    };
  }

  /** hive_get_cluster_status — formatted system/agent/task summary. */
  private async getClusterStatus() {
    const status = await this.hiveClient.getClusterStatus();
    return {
      content: [
        {
          type: 'text',
          text: `🐝 **Hive Cluster Status**\n\n` +
                `🟢 **System**: ${status.system.status} (v${status.system.version})\n` +
                // Uptime is reported by the backend in seconds.
                `⏱️ **Uptime**: ${Math.floor(status.system.uptime / 3600)}h ${Math.floor((status.system.uptime % 3600) / 60)}m\n\n` +
                `🤖 **Agents**: ${status.agents.total} total\n` +
                `   • Available: ${status.agents.available}\n` +
                `   • Busy: ${status.agents.busy}\n\n` +
                `🎯 **Tasks**: ${status.tasks.total} total\n` +
                `   • Pending: ${status.tasks.pending}\n` +
                `   • Running: ${status.tasks.running}\n` +
                `   • Completed: ${status.tasks.completed}\n` +
                `   • Failed: ${status.tasks.failed}`,
        },
      ],
    };
  }

  /** hive_get_metrics — raw Prometheus text wrapped in a code fence. */
  private async getMetrics() {
    const metrics = await this.hiveClient.getMetrics();
    return {
      content: [
        {
          type: 'text',
          text: `📊 **Hive Cluster Metrics**\n\n\`\`\`\n${metrics}\n\`\`\``,
        },
      ],
    };
  }

  /** hive_get_executions — execution history, optionally scoped to one workflow. */
  private async getExecutions(workflowId?: string) {
    const executions = await this.hiveClient.getExecutions(workflowId);
    return {
      content: [
        {
          type: 'text',
          text: `⚡ Workflow Executions (${executions.length} found):\n\n${executions.length > 0
            ? executions.map(exec =>
                `⚡ **${exec.id}**\n` +
                `   • Workflow: ${exec.workflow_id}\n` +
                `   • Status: ${exec.status}\n` +
                `   • Started: ${exec.started_at}\n` +
                `${exec.completed_at ? `   • Completed: ${exec.completed_at}\n` : ''}`
              ).join('\n')
            : 'No executions found.'
          }`,
        },
      ],
    };
  }

  /**
   * hive_coordinate_development — create one task per breakdown item.
   * Tasks are created sequentially; coordination_strategy is only echoed in
   * the output, not acted on here.
   */
  private async coordinateDevelopment(args: any) {
    const { project_description, breakdown, coordination_strategy = 'mixed' } = args;
    // Create tasks for each specialization in the breakdown
    const createdTasks = [];
    for (const item of breakdown) {
      const taskData = {
        type: item.specialization,
        priority: item.priority,
        context: {
          objective: item.task_description,
          project_context: project_description,
          dependencies: item.dependencies || [],
          // NOTE(review): a fresh uuid is generated per task, so tasks from
          // the same coordination do NOT share a coordination_id. If the id
          // is meant to group related tasks, hoist uuidv4() above the loop —
          // confirm intended semantics with the backend.
          coordination_id: uuidv4(),
        },
      };
      const task = await this.hiveClient.createTask(taskData);
      createdTasks.push(task);
    }
    return {
      content: [
        {
          type: 'text',
          text: `🎯 **Development Coordination Initiated**\n\n` +
                `📋 **Project**: ${project_description}\n` +
                `🔄 **Strategy**: ${coordination_strategy}\n` +
                `🎯 **Tasks Created**: ${createdTasks.length}\n\n` +
                `**Task Breakdown:**\n${createdTasks.map(task =>
                  `• **${task.id}** (${task.type}) - Priority ${task.priority}/5`
                ).join('\n')}\n\n` +
                `All tasks have been queued and will be distributed to specialized agents based on availability and dependencies.`,
        },
      ],
    };
  }

  /**
   * hive_bring_online — shells out to a Python auto-discovery script and
   * parses its stdout into a human-readable summary.
   *
   * NOTE(review): force_refresh and subnet_scan are accepted (and advertised
   * in the tool schema) but never passed to the script — confirm whether the
   * script should receive them as CLI flags.
   * NOTE(review): the script path and cwd are hard-coded absolute paths
   * (/home/tony/...), which ties this tool to one machine.
   */
  private async bringHiveOnline(args: any) {
    const { force_refresh = false, subnet_scan = true } = args;
    try {
      // Get the path to the auto-discovery script
      const scriptPath = path.resolve('/home/tony/AI/projects/hive/scripts/auto_discover_agents.py');
      return new Promise((resolve, reject) => {
        let output = '';
        let errorOutput = '';
        // Execute the auto-discovery script
        const child = spawn('python3', [scriptPath], {
          cwd: '/home/tony/AI/projects/hive',
          stdio: 'pipe',
        });
        child.stdout.on('data', (data) => {
          output += data.toString();
        });
        child.stderr.on('data', (data) => {
          errorOutput += data.toString();
        });
        child.on('close', (code) => {
          if (code === 0) {
            // Parse the output to extract key information. The expected lines
            // are "Discovered: N ...", "Registered: N ...", "Failed: N ...".
            const lines = output.split('\n');
            const discoveredMatch = lines.find(l => l.includes('Discovered:'));
            const registeredMatch = lines.find(l => l.includes('Registered:'));
            const failedMatch = lines.find(l => l.includes('Failed:'));
            const discovered = discoveredMatch ? discoveredMatch.split('Discovered: ')[1]?.split(' ')[0] : '0';
            const registered = registeredMatch ? registeredMatch.split('Registered: ')[1]?.split(' ')[0] : '0';
            const failed = failedMatch ? failedMatch.split('Failed: ')[1]?.split(' ')[0] : '0';
            // Extract agent detail lines of the form "• name (endpoint) - N models".
            const agentLines = lines.filter(l => l.includes('•') && l.includes('models'));
            const agentDetails = agentLines.map(line => {
              const match = line.match(/• (.+) \((.+)\) - (\d+) models/);
              return match ? `• **${match[1]}** (${match[2]}) - ${match[3]} models` : line;
            });
            resolve({
              content: [
                {
                  type: 'text',
                  text: `🐝 **Hive Cluster Online!** 🚀\n\n` +
                        `🔍 **Auto-Discovery Complete**\n` +
                        `• Discovered: ${discovered} agents\n` +
                        `• Registered: ${registered} agents\n` +
                        `• Failed: ${failed} agents\n\n` +
                        `🤖 **Active Agents:**\n${agentDetails.join('\n')}\n\n` +
                        `✅ **Status**: The Hive cluster is now fully operational and ready for distributed AI orchestration!\n\n` +
                        `🎯 **Next Steps:**\n` +
                        `• Use \`hive_get_cluster_status\` to view detailed status\n` +
                        `• Use \`hive_coordinate_development\` to start distributed tasks\n` +
                        `• Use \`hive_create_workflow\` to build complex workflows`,
                },
              ],
            });
          } else {
            reject(new Error(`Auto-discovery script failed with exit code ${code}. Error: ${errorOutput}`));
          }
        });
        child.on('error', (error) => {
          reject(new Error(`Failed to execute auto-discovery script: ${error.message}`));
        });
      });
    } catch (error) {
      // Synchronous setup failures (e.g. path resolution) land here; spawn
      // failures are surfaced through the rejected promise above.
      return {
        content: [
          {
            type: 'text',
            text: `❌ **Failed to bring Hive online**\n\n` +
                  `Error: ${error instanceof Error ? error.message : String(error)}\n\n` +
                  `Please ensure:\n` +
                  `• The Hive backend is running\n` +
                  `• The auto-discovery script exists at /home/tony/AI/projects/hive/scripts/auto_discover_agents.py\n` +
                  `• Python3 is available and required dependencies are installed`,
          },
        ],
        isError: true,
      };
    }
  }
}

132
mcp-server/src/index.ts Normal file
View File

@@ -0,0 +1,132 @@
#!/usr/bin/env node
/**
* Hive MCP Server
*
* Exposes the Hive Distributed AI Orchestration Platform via Model Context Protocol (MCP)
* Allows AI assistants like Claude to directly orchestrate distributed development tasks
*/
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
import {
CallToolRequestSchema,
ListResourcesRequestSchema,
ListToolsRequestSchema,
ReadResourceRequestSchema,
} from '@modelcontextprotocol/sdk/types.js';
import { HiveClient } from './hive-client.js';
import { HiveTools } from './hive-tools.js';
import { HiveResources } from './hive-resources.js';
/**
 * MCP server wrapper around the Hive distributed orchestration platform.
 *
 * Wires a HiveClient plus the tool and resource handlers into an MCP
 * `Server` instance and serves them over a stdio transport.
 */
class HiveMCPServer {
  private server: Server;
  private hiveClient: HiveClient;
  private hiveTools: HiveTools;
  private hiveResources: HiveResources;

  constructor() {
    this.server = new Server(
      {
        name: 'hive-mcp-server',
        version: '1.0.0',
      },
      {
        capabilities: {
          tools: {},
          resources: {},
        },
      }
    );
    // Initialize Hive client and handlers
    this.hiveClient = new HiveClient();
    this.hiveTools = new HiveTools(this.hiveClient);
    this.hiveResources = new HiveResources(this.hiveClient);
    this.setupHandlers();
  }

  /** Register MCP request handlers and process-level error/signal hooks. */
  private setupHandlers() {
    // Tools handler - exposes Hive operations as MCP tools
    this.server.setRequestHandler(ListToolsRequestSchema, async () => {
      return {
        tools: this.hiveTools.getAllTools(),
      };
    });
    this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
      const { name, arguments: args } = request.params;
      return await this.hiveTools.executeTool(name, args || {});
    });
    // Resources handler - exposes Hive cluster state as MCP resources
    this.server.setRequestHandler(ListResourcesRequestSchema, async () => {
      return {
        resources: await this.hiveResources.getAllResources(),
      };
    });
    this.server.setRequestHandler(ReadResourceRequestSchema, async (request) => {
      const { uri } = request.params;
      return await this.hiveResources.readResource(uri);
    });
    // Error handling
    this.server.onerror = (error) => {
      console.error('[MCP Server Error]:', error);
    };
    process.on('SIGINT', async () => {
      await this.server.close();
      process.exit(0);
    });
  }

  /**
   * Connect to the Hive backend, run agent auto-discovery, then begin
   * serving MCP requests over stdio.
   *
   * Exits the process if the backend is unreachable; an auto-discovery
   * failure is logged as a warning and is non-fatal.
   */
  async start() {
    // BUGFIX: all status logging goes to stderr (console.error / console.warn).
    // The stdio transport uses stdout for the JSON-RPC protocol stream, so
    // writing human-readable logs there with console.log would corrupt the
    // protocol framing once a client is attached.
    console.error('🐝 Starting Hive MCP Server...');
    // Test connection to Hive backend
    try {
      await this.hiveClient.testConnection();
      console.error('✅ Connected to Hive backend successfully');
    } catch (error) {
      console.error('❌ Failed to connect to Hive backend:', error);
      process.exit(1);
    }
    // Auto-discover and register agents on startup (best-effort).
    console.error('🔍 Auto-discovering agents...');
    try {
      await this.autoDiscoverAgents();
      console.error('✅ Auto-discovery completed successfully');
    } catch (error) {
      console.warn('⚠️ Auto-discovery failed, continuing without it:', error);
    }
    const transport = new StdioServerTransport();
    await this.server.connect(transport);
    console.error('🚀 Hive MCP Server running on stdio');
    console.error('🔗 AI assistants can now orchestrate your distributed cluster!');
  }

  /** Run the `hive_bring_online` tool to discover and register agents. */
  private async autoDiscoverAgents() {
    // Use the existing hive_bring_online functionality
    const result = await this.hiveTools.executeTool('hive_bring_online', {
      force_refresh: false,
      subnet_scan: true,
    });
    if (result.isError) {
      throw new Error(`Auto-discovery failed: ${result.content[0]?.text || 'Unknown error'}`);
    }
  }
}
// Start the server
const server = new HiveMCPServer();
server.start().catch((error) => {
console.error('Failed to start Hive MCP Server:', error);
process.exit(1);
});