Initial commit: Complete Hive distributed AI orchestration platform
This comprehensive implementation includes:
- FastAPI backend with MCP server integration
- React/TypeScript frontend with Vite
- PostgreSQL database with Redis caching
- Grafana/Prometheus monitoring stack
- Docker Compose orchestration
- Full MCP protocol support for Claude Code integration

Features:
- Agent discovery and management across the network
- Visual workflow editor and execution engine
- Real-time task coordination and monitoring
- Multi-model support with specialized agents
- Distributed development task allocation

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
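Setup note: a minimal sketch of how the compiled server at mcp-server/dist/index.js might be registered with an MCP client such as Claude Code or Claude Desktop, assuming the standard mcpServers JSON configuration format. The server name "hive" and the install path are placeholders, and the HIVE_API_URL/HIVE_WS_URL values simply mirror the defaults read in mcp-server/dist/hive-client.js below:

    {
      "mcpServers": {
        "hive": {
          "command": "node",
          "args": ["/path/to/hive/mcp-server/dist/index.js"],
          "env": {
            "HIVE_API_URL": "http://localhost:8087",
            "HIVE_WS_URL": "ws://localhost:8087"
          }
        }
      }
    }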
85
mcp-server/dist/hive-client.d.ts
vendored
Normal file
@@ -0,0 +1,85 @@
/**
 * Hive Client
 *
 * Handles communication with the Hive backend API
 */
import WebSocket from 'ws';
export interface HiveConfig {
    baseUrl: string;
    wsUrl: string;
    timeout: number;
}
export interface Agent {
    id: string;
    endpoint: string;
    model: string;
    specialty: string;
    status: 'available' | 'busy' | 'offline';
    current_tasks: number;
    max_concurrent: number;
}
export interface Task {
    id: string;
    type: string;
    priority: number;
    context: Record<string, any>;
    status: 'pending' | 'in_progress' | 'completed' | 'failed';
    assigned_agent?: string;
    result?: Record<string, any>;
    created_at: string;
    completed_at?: string;
}
export interface ClusterStatus {
    system: {
        status: string;
        uptime: number;
        version: string;
    };
    agents: {
        total: number;
        available: number;
        busy: number;
    };
    tasks: {
        total: number;
        pending: number;
        running: number;
        completed: number;
        failed: number;
    };
}
export declare class HiveClient {
    private api;
    private config;
    private wsConnection?;
    constructor(config?: Partial<HiveConfig>);
    testConnection(): Promise<boolean>;
    getAgents(): Promise<Agent[]>;
    registerAgent(agentData: Partial<Agent>): Promise<{
        agent_id: string;
    }>;
    createTask(taskData: {
        type: string;
        priority: number;
        context: Record<string, any>;
    }): Promise<Task>;
    getTask(taskId: string): Promise<Task>;
    getTasks(filters?: {
        status?: string;
        agent?: string;
        limit?: number;
    }): Promise<Task[]>;
    getWorkflows(): Promise<any[]>;
    createWorkflow(workflowData: Record<string, any>): Promise<{
        workflow_id: string;
    }>;
    executeWorkflow(workflowId: string, inputs?: Record<string, any>): Promise<{
        execution_id: string;
    }>;
    getClusterStatus(): Promise<ClusterStatus>;
    getMetrics(): Promise<string>;
    getExecutions(workflowId?: string): Promise<any[]>;
    connectWebSocket(topic?: string): Promise<WebSocket>;
    disconnect(): Promise<void>;
}
//# sourceMappingURL=hive-client.d.ts.map
1
mcp-server/dist/hive-client.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"hive-client.d.ts","sourceRoot":"","sources":["../src/hive-client.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAGH,OAAO,SAAS,MAAM,IAAI,CAAC;AAE3B,MAAM,WAAW,UAAU;IACzB,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,KAAK;IACpB,EAAE,EAAE,MAAM,CAAC;IACX,QAAQ,EAAE,MAAM,CAAC;IACjB,KAAK,EAAE,MAAM,CAAC;IACd,SAAS,EAAE,MAAM,CAAC;IAClB,MAAM,EAAE,WAAW,GAAG,MAAM,GAAG,SAAS,CAAC;IACzC,aAAa,EAAE,MAAM,CAAC;IACtB,cAAc,EAAE,MAAM,CAAC;CACxB;AAED,MAAM,WAAW,IAAI;IACnB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,MAAM,CAAC;IACjB,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC7B,MAAM,EAAE,SAAS,GAAG,aAAa,GAAG,WAAW,GAAG,QAAQ,CAAC;IAC3D,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,MAAM,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC7B,UAAU,EAAE,MAAM,CAAC;IACnB,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB;AAED,MAAM,WAAW,aAAa;IAC5B,MAAM,EAAE;QACN,MAAM,EAAE,MAAM,CAAC;QACf,MAAM,EAAE,MAAM,CAAC;QACf,OAAO,EAAE,MAAM,CAAC;KACjB,CAAC;IACF,MAAM,EAAE;QACN,KAAK,EAAE,MAAM,CAAC;QACd,SAAS,EAAE,MAAM,CAAC;QAClB,IAAI,EAAE,MAAM,CAAC;KACd,CAAC;IACF,KAAK,EAAE;QACL,KAAK,EAAE,MAAM,CAAC;QACd,OAAO,EAAE,MAAM,CAAC;QAChB,OAAO,EAAE,MAAM,CAAC;QAChB,SAAS,EAAE,MAAM,CAAC;QAClB,MAAM,EAAE,MAAM,CAAC;KAChB,CAAC;CACH;AAED,qBAAa,UAAU;IACrB,OAAO,CAAC,GAAG,CAAgB;IAC3B,OAAO,CAAC,MAAM,CAAa;IAC3B,OAAO,CAAC,YAAY,CAAC,CAAY;gBAErB,MAAM,CAAC,EAAE,OAAO,CAAC,UAAU,CAAC;IAiBlC,cAAc,IAAI,OAAO,CAAC,OAAO,CAAC;IAUlC,SAAS,IAAI,OAAO,CAAC,KAAK,EAAE,CAAC;IAK7B,aAAa,CAAC,SAAS,EAAE,OAAO,CAAC,KAAK,CAAC,GAAG,OAAO,CAAC;QAAE,QAAQ,EAAE,MAAM,CAAA;KAAE,CAAC;IAMvE,UAAU,CAAC,QAAQ,EAAE;QACzB,IAAI,EAAE,MAAM,CAAC;QACb,QAAQ,EAAE,MAAM,CAAC;QACjB,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;KAC9B,GAAG,OAAO,CAAC,IAAI,CAAC;IAKX,OAAO,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAKtC,QAAQ,CAAC,OAAO,CAAC,EAAE;QACvB,MAAM,CAAC,EAAE,MAAM,CAAC;QAChB,KAAK,CAAC,EAAE,MAAM,CAAC;QACf,KAAK,CAAC,EAAE,MAAM,CAAC;KAChB,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC;IAWb,YAAY,IAAI,OAAO,CAAC,GAAG,EAAE,CAAC;IAK9B,cAAc,CAAC,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,OAAO,CAAC;QAAE,WAAW,EAAE,MAAM,CAAA;KAAE,CAAC;IAKnF,eAAe,CAAC,UAAU,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,OAAO,CAAC;QAAE,YAAY,EAAE,MAAM,CAAA;KAAE,CAAC;IAMpG,gBAAgB,IAAI,OAAO,CAAC,aAAa,CAAC;IAK1C,UAAU,IAAI,OAAO,CAAC,MAAM,CAAC;IAK7B,aAAa,CAAC,UAAU,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,EAAE,CAAC;IAOlD,gBAAgB,CAAC,KAAK,GAAE,MAAkB,GAAG,OAAO,CAAC,SAAS,CAAC;IA0B/D,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC;CAMlC"}
123
mcp-server/dist/hive-client.js
vendored
Normal file
@@ -0,0 +1,123 @@
/**
 * Hive Client
 *
 * Handles communication with the Hive backend API
 */
import axios from 'axios';
import WebSocket from 'ws';
export class HiveClient {
    api;
    config;
    wsConnection;
    constructor(config) {
        this.config = {
            baseUrl: process.env.HIVE_API_URL || 'http://localhost:8087',
            wsUrl: process.env.HIVE_WS_URL || 'ws://localhost:8087',
            timeout: 30000,
            ...config,
        };
        this.api = axios.create({
            baseURL: this.config.baseUrl,
            timeout: this.config.timeout,
            headers: {
                'Content-Type': 'application/json',
            },
        });
    }
    async testConnection() {
        try {
            const response = await this.api.get('/health');
            return response.data.status === 'healthy';
        }
        catch (error) {
            throw new Error(`Failed to connect to Hive: ${error}`);
        }
    }
    // Agent Management
    async getAgents() {
        const response = await this.api.get('/api/agents');
        return response.data.agents || [];
    }
    async registerAgent(agentData) {
        const response = await this.api.post('/api/agents', agentData);
        return response.data;
    }
    // Task Management
    async createTask(taskData) {
        const response = await this.api.post('/api/tasks', taskData);
        return response.data;
    }
    async getTask(taskId) {
        const response = await this.api.get(`/api/tasks/${taskId}`);
        return response.data;
    }
    async getTasks(filters) {
        const params = new URLSearchParams();
        if (filters?.status)
            params.append('status', filters.status);
        if (filters?.agent)
            params.append('agent', filters.agent);
        if (filters?.limit)
            params.append('limit', filters.limit.toString());
        const response = await this.api.get(`/api/tasks?${params}`);
        return response.data.tasks || [];
    }
    // Workflow Management
    async getWorkflows() {
        const response = await this.api.get('/api/workflows');
        return response.data.workflows || [];
    }
    async createWorkflow(workflowData) {
        const response = await this.api.post('/api/workflows', workflowData);
        return response.data;
    }
    async executeWorkflow(workflowId, inputs) {
        const response = await this.api.post(`/api/workflows/${workflowId}/execute`, { inputs });
        return response.data;
    }
    // Monitoring and Status
    async getClusterStatus() {
        const response = await this.api.get('/api/status');
        return response.data;
    }
    async getMetrics() {
        const response = await this.api.get('/api/metrics');
        return response.data;
    }
    async getExecutions(workflowId) {
        const url = workflowId ? `/api/executions?workflow_id=${workflowId}` : '/api/executions';
        const response = await this.api.get(url);
        return response.data.executions || [];
    }
    // Real-time Updates via WebSocket
    async connectWebSocket(topic = 'general') {
        return new Promise((resolve, reject) => {
            const ws = new WebSocket(`${this.config.wsUrl}/ws/${topic}`);
            ws.on('open', () => {
                console.log(`🔗 Connected to Hive WebSocket (${topic})`);
                this.wsConnection = ws;
                resolve(ws);
            });
            ws.on('error', (error) => {
                console.error('WebSocket error:', error);
                reject(error);
            });
            ws.on('message', (data) => {
                try {
                    const message = JSON.parse(data.toString());
                    console.log('📨 Hive update:', message);
                }
                catch (error) {
                    console.error('Failed to parse WebSocket message:', error);
                }
            });
        });
    }
    async disconnect() {
        if (this.wsConnection) {
            this.wsConnection.close();
            this.wsConnection = undefined;
        }
    }
}
//# sourceMappingURL=hive-client.js.map
1
mcp-server/dist/hive-client.js.map
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"hive-client.js","sourceRoot":"","sources":["../src/hive-client.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,KAAwB,MAAM,OAAO,CAAC;AAC7C,OAAO,SAAS,MAAM,IAAI,CAAC;AAkD3B,MAAM,OAAO,UAAU;IACb,GAAG,CAAgB;IACnB,MAAM,CAAa;IACnB,YAAY,CAAa;IAEjC,YAAY,MAA4B;QACtC,IAAI,CAAC,MAAM,GAAG;YACZ,OAAO,EAAE,OAAO,CAAC,GAAG,CAAC,YAAY,IAAI,uBAAuB;YAC5D,KAAK,EAAE,OAAO,CAAC,GAAG,CAAC,WAAW,IAAI,qBAAqB;YACvD,OAAO,EAAE,KAAK;YACd,GAAG,MAAM;SACV,CAAC;QAEF,IAAI,CAAC,GAAG,GAAG,KAAK,CAAC,MAAM,CAAC;YACtB,OAAO,EAAE,IAAI,CAAC,MAAM,CAAC,OAAO;YAC5B,OAAO,EAAE,IAAI,CAAC,MAAM,CAAC,OAAO;YAC5B,OAAO,EAAE;gBACP,cAAc,EAAE,kBAAkB;aACnC;SACF,CAAC,CAAC;IACL,CAAC;IAED,KAAK,CAAC,cAAc;QAClB,IAAI,CAAC;YACH,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC;YAC/C,OAAO,QAAQ,CAAC,IAAI,CAAC,MAAM,KAAK,SAAS,CAAC;QAC5C,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,MAAM,IAAI,KAAK,CAAC,8BAA8B,KAAK,EAAE,CAAC,CAAC;QACzD,CAAC;IACH,CAAC;IAED,mBAAmB;IACnB,KAAK,CAAC,SAAS;QACb,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,aAAa,CAAC,CAAC;QACnD,OAAO,QAAQ,CAAC,IAAI,CAAC,MAAM,IAAI,EAAE,CAAC;IACpC,CAAC;IAED,KAAK,CAAC,aAAa,CAAC,SAAyB;QAC3C,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,aAAa,EAAE,SAAS,CAAC,CAAC;QAC/D,OAAO,QAAQ,CAAC,IAAI,CAAC;IACvB,CAAC;IAED,kBAAkB;IAClB,KAAK,CAAC,UAAU,CAAC,QAIhB;QACC,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;QAC7D,OAAO,QAAQ,CAAC,IAAI,CAAC;IACvB,CAAC;IAED,KAAK,CAAC,OAAO,CAAC,MAAc;QAC1B,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,cAAc,MAAM,EAAE,CAAC,CAAC;QAC5D,OAAO,QAAQ,CAAC,IAAI,CAAC;IACvB,CAAC;IAED,KAAK,CAAC,QAAQ,CAAC,OAId;QACC,MAAM,MAAM,GAAG,IAAI,eAAe,EAAE,CAAC;QACrC,IAAI,OAAO,EAAE,MAAM;YAAE,MAAM,CAAC,MAAM,CAAC,QAAQ,EAAE,OAAO,CAAC,MAAM,CAAC,CAAC;QAC7D,IAAI,OAAO,EAAE,KAAK;YAAE,MAAM,CAAC,MAAM,CAAC,OAAO,EAAE,OAAO,CAAC,KAAK,CAAC,CAAC;QAC1D,IAAI,OAAO,EAAE,KAAK;YAAE,MAAM,CAAC,MAAM,CAAC,OAAO,EAAE,OAAO,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;QAErE,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,cAAc,MAAM,EAAE,CAAC,CAAC;QAC5D,OAAO,QAAQ,CAAC,IAAI,CAAC,KAAK,IAAI,EAAE,CAAC;IACnC,CAAC;IAED,sBAAsB;IACtB,KAAK,CAAC,YAAY;QAChB,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,gBAAgB,CAAC,CAAC;QACtD,OAAO,QAAQ,CAAC,IAAI,CAAC,SAAS,IAAI,EAAE,CAAC;IACvC,CAAC;IAED,KAAK,CAAC,cAAc,CAAC,YAAiC;QACpD,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,gBAAgB,EAAE,YAAY,CAAC,CAAC;QACrE,OAAO,QAAQ,CAAC,IAAI,CAAC;IACvB,CAAC;IAED,KAAK,CAAC,eAAe,CAAC,UAAkB,EAAE,MAA4B;QACpE,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,kBAAkB,UAAU,UAAU,EAAE,EAAE,MAAM,EAAE,CAAC,CAAC;QACzF,OAAO,QAAQ,CAAC,IAAI,CAAC;IACvB,CAAC;IAED,wBAAwB;IACxB,KAAK,CAAC,gBAAgB;QACpB,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,aAAa,CAAC,CAAC;QACnD,OAAO,QAAQ,CAAC,IAAI,CAAC;IACvB,CAAC;IAED,KAAK,CAAC,UAAU;QACd,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;QACpD,OAAO,QAAQ,CAAC,IAAI,CAAC;IACvB,CAAC;IAED,KAAK,CAAC,aAAa,CAAC,UAAmB;QACrC,MAAM,GAAG,GAAG,UAAU,CAAC,CAAC,CAAC,+BAA+B,UAAU,EAAE,CAAC,CAAC,CAAC,iBAAiB,CAAC;QACzF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QACzC,OAAO,QAAQ,CAAC,IAAI,CAAC,UAAU,IAAI,EAAE,CAAC;IACxC,CAAC;IAED,kCAAkC;IAClC,KAAK,CAAC,gBAAgB,CAAC,QAAgB,SAAS;QAC9C,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACrC,MAAM,EAAE,GAAG,IAAI,SAAS,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,OAAO,KAAK,EAAE,CAAC,CAAC;YAE7D,EAAE,CAAC,EAAE,CAAC,MAAM,EAAE,GAAG,EAAE;gBACjB,OAAO,CAAC,GAAG,CAAC,mCAAmC,KAAK,GAAG,CAAC,CAAC;gBACzD,IAAI,CAAC,YAAY,GAAG,EAAE,CAAC;gBACvB,OAAO,CAAC,EAAE,CAAC,CAAC;YACd,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,EAAE;gBACvB,OAAO,CAAC,KAAK,CAAC,kBAAkB,E
AAE,KAAK,CAAC,CAAC;gBACzC,MAAM,CAAC,KAAK,CAAC,CAAC;YAChB,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,EAAE,CAAC,SAAS,EAAE,CAAC,IAAI,EAAE,EAAE;gBACxB,IAAI,CAAC;oBACH,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;oBAC5C,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;gBAC1C,CAAC;gBAAC,OAAO,KAAK,EAAE,CAAC;oBACf,OAAO,CAAC,KAAK,CAAC,oCAAoC,EAAE,KAAK,CAAC,CAAC;gBAC7D,CAAC;YACH,CAAC,CAAC,CAAC;QACL,CAAC,CAAC,CAAC;IACL,CAAC;IAED,KAAK,CAAC,UAAU;QACd,IAAI,IAAI,CAAC,YAAY,EAAE,CAAC;YACtB,IAAI,CAAC,YAAY,CAAC,KAAK,EAAE,CAAC;YAC1B,IAAI,CAAC,YAAY,GAAG,SAAS,CAAC;QAChC,CAAC;IACH,CAAC;CACF"}
35
mcp-server/dist/hive-resources.d.ts
vendored
Normal file
@@ -0,0 +1,35 @@
/**
 * Hive Resources
 *
 * Defines MCP resources that expose Hive cluster state and real-time data
 */
import { Resource } from '@modelcontextprotocol/sdk/types.js';
import { HiveClient } from './hive-client.js';
export declare class HiveResources {
    private hiveClient;
    constructor(hiveClient: HiveClient);
    getAllResources(): Promise<Resource[]>;
    readResource(uri: string): Promise<{
        contents: Array<{
            type: string;
            text?: string;
            data?: string;
            mimeType?: string;
        }>;
    }>;
    private getClusterStatusResource;
    private getAgentsResource;
    private getActiveTasksResource;
    private getCompletedTasksResource;
    private getWorkflowsResource;
    private getExecutionsResource;
    private getMetricsResource;
    private getCapabilitiesResource;
    private groupAgentsBySpecialty;
    private formatTaskForResource;
    private analyzeTaskQueue;
    private calculateTaskMetrics;
    private summarizeExecutionStatuses;
    private calculateDuration;
}
//# sourceMappingURL=hive-resources.d.ts.map
1
mcp-server/dist/hive-resources.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"hive-resources.d.ts","sourceRoot":"","sources":["../src/hive-resources.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAE,QAAQ,EAAE,MAAM,oCAAoC,CAAC;AAC9D,OAAO,EAAE,UAAU,EAAE,MAAM,kBAAkB,CAAC;AAE9C,qBAAa,aAAa;IACxB,OAAO,CAAC,UAAU,CAAa;gBAEnB,UAAU,EAAE,UAAU;IAI5B,eAAe,IAAI,OAAO,CAAC,QAAQ,EAAE,CAAC;IAqDtC,YAAY,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC;QAAE,QAAQ,EAAE,KAAK,CAAC;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,IAAI,CAAC,EAAE,MAAM,CAAC;YAAC,IAAI,CAAC,EAAE,MAAM,CAAC;YAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;SAAE,CAAC,CAAA;KAAE,CAAC;YA0ClH,wBAAwB;YAcxB,iBAAiB;YAkCjB,sBAAsB;YA0BtB,yBAAyB;YA4BzB,oBAAoB;YA0BpB,qBAAqB;YA6BrB,kBAAkB;YAclB,uBAAuB;IAyDrC,OAAO,CAAC,sBAAsB;IAW9B,OAAO,CAAC,qBAAqB;IAa7B,OAAO,CAAC,gBAAgB;IAqBxB,OAAO,CAAC,oBAAoB;IAiB5B,OAAO,CAAC,0BAA0B;IAOlC,OAAO,CAAC,iBAAiB;CAM1B"}
370
mcp-server/dist/hive-resources.js
vendored
Normal file
@@ -0,0 +1,370 @@
/**
 * Hive Resources
 *
 * Defines MCP resources that expose Hive cluster state and real-time data
 */
export class HiveResources {
    hiveClient;
    constructor(hiveClient) {
        this.hiveClient = hiveClient;
    }
    async getAllResources() {
        return [
            {
                uri: 'hive://cluster/status',
                name: 'Cluster Status',
                description: 'Real-time status of the entire Hive cluster including agents and tasks',
                mimeType: 'application/json',
            },
            {
                uri: 'hive://agents/list',
                name: 'Agent Registry',
                description: 'List of all registered AI agents with their capabilities and current status',
                mimeType: 'application/json',
            },
            {
                uri: 'hive://tasks/active',
                name: 'Active Tasks',
                description: 'Currently running and pending tasks across the cluster',
                mimeType: 'application/json',
            },
            {
                uri: 'hive://tasks/completed',
                name: 'Completed Tasks',
                description: 'Recently completed tasks with results and performance metrics',
                mimeType: 'application/json',
            },
            {
                uri: 'hive://workflows/available',
                name: 'Available Workflows',
                description: 'All configured workflows ready for execution',
                mimeType: 'application/json',
            },
            {
                uri: 'hive://executions/recent',
                name: 'Recent Executions',
                description: 'Recent workflow executions with status and results',
                mimeType: 'application/json',
            },
            {
                uri: 'hive://metrics/prometheus',
                name: 'Cluster Metrics',
                description: 'Prometheus metrics for monitoring cluster performance',
                mimeType: 'text/plain',
            },
            {
                uri: 'hive://capabilities/overview',
                name: 'Cluster Capabilities',
                description: 'Overview of available agent types and their specializations',
                mimeType: 'application/json',
            },
        ];
    }
    async readResource(uri) {
        try {
            switch (uri) {
                case 'hive://cluster/status':
                    return await this.getClusterStatusResource();
                case 'hive://agents/list':
                    return await this.getAgentsResource();
                case 'hive://tasks/active':
                    return await this.getActiveTasksResource();
                case 'hive://tasks/completed':
                    return await this.getCompletedTasksResource();
                case 'hive://workflows/available':
                    return await this.getWorkflowsResource();
                case 'hive://executions/recent':
                    return await this.getExecutionsResource();
                case 'hive://metrics/prometheus':
                    return await this.getMetricsResource();
                case 'hive://capabilities/overview':
                    return await this.getCapabilitiesResource();
                default:
                    throw new Error(`Resource not found: ${uri}`);
            }
        }
        catch (error) {
            return {
                contents: [
                    {
                        type: 'text',
                        text: `Error reading resource ${uri}: ${error instanceof Error ? error.message : String(error)}`,
                    },
                ],
            };
        }
    }
    async getClusterStatusResource() {
        const status = await this.hiveClient.getClusterStatus();
        return {
            contents: [
                {
                    type: 'text',
                    data: JSON.stringify(status, null, 2),
                    mimeType: 'application/json',
                },
            ],
        };
    }
    async getAgentsResource() {
        const agents = await this.hiveClient.getAgents();
        const agentData = {
            total_agents: agents.length,
            agents: agents.map(agent => ({
                id: agent.id,
                specialty: agent.specialty,
                model: agent.model,
                endpoint: agent.endpoint,
                status: agent.status,
                current_tasks: agent.current_tasks,
                max_concurrent: agent.max_concurrent,
                utilization: agent.max_concurrent > 0 ? (agent.current_tasks / agent.max_concurrent * 100).toFixed(1) + '%' : '0%',
            })),
            by_specialty: this.groupAgentsBySpecialty(agents),
            availability_summary: {
                available: agents.filter(a => a.status === 'available').length,
                busy: agents.filter(a => a.status === 'busy').length,
                offline: agents.filter(a => a.status === 'offline').length,
            },
        };
        return {
            contents: [
                {
                    type: 'text',
                    data: JSON.stringify(agentData, null, 2),
                    mimeType: 'application/json',
                },
            ],
        };
    }
    async getActiveTasksResource() {
        const pendingTasks = await this.hiveClient.getTasks({ status: 'pending', limit: 50 });
        const runningTasks = await this.hiveClient.getTasks({ status: 'in_progress', limit: 50 });
        const activeData = {
            summary: {
                pending: pendingTasks.length,
                running: runningTasks.length,
                total_active: pendingTasks.length + runningTasks.length,
            },
            pending_tasks: pendingTasks.map(this.formatTaskForResource),
            running_tasks: runningTasks.map(this.formatTaskForResource),
            queue_analysis: this.analyzeTaskQueue(pendingTasks),
        };
        return {
            contents: [
                {
                    type: 'text',
                    data: JSON.stringify(activeData, null, 2),
                    mimeType: 'application/json',
                },
            ],
        };
    }
    async getCompletedTasksResource() {
        const completedTasks = await this.hiveClient.getTasks({ status: 'completed', limit: 20 });
        const failedTasks = await this.hiveClient.getTasks({ status: 'failed', limit: 10 });
        const completedData = {
            summary: {
                completed: completedTasks.length,
                failed: failedTasks.length,
                success_rate: completedTasks.length + failedTasks.length > 0
                    ? ((completedTasks.length / (completedTasks.length + failedTasks.length)) * 100).toFixed(1) + '%'
                    : 'N/A',
            },
            recent_completed: completedTasks.map(this.formatTaskForResource),
            recent_failed: failedTasks.map(this.formatTaskForResource),
            performance_metrics: this.calculateTaskMetrics(completedTasks),
        };
        return {
            contents: [
                {
                    type: 'text',
                    data: JSON.stringify(completedData, null, 2),
                    mimeType: 'application/json',
                },
            ],
        };
    }
    async getWorkflowsResource() {
        const workflows = await this.hiveClient.getWorkflows();
        const workflowData = {
            total_workflows: workflows.length,
            workflows: workflows.map(wf => ({
                id: wf.id,
                name: wf.name || 'Unnamed Workflow',
                description: wf.description || 'No description',
                status: wf.status || 'unknown',
                created: wf.created_at || 'unknown',
                steps: wf.steps?.length || 0,
            })),
        };
        return {
            contents: [
                {
                    type: 'text',
                    data: JSON.stringify(workflowData, null, 2),
                    mimeType: 'application/json',
                },
            ],
        };
    }
    async getExecutionsResource() {
        const executions = await this.hiveClient.getExecutions();
        const executionData = {
            total_executions: executions.length,
            recent_executions: executions.slice(0, 10).map(exec => ({
                id: exec.id,
                workflow_id: exec.workflow_id,
                status: exec.status,
                started_at: exec.started_at,
                completed_at: exec.completed_at,
                duration: exec.completed_at && exec.started_at
                    ? this.calculateDuration(exec.started_at, exec.completed_at)
                    : null,
            })),
            status_summary: this.summarizeExecutionStatuses(executions),
        };
        return {
            contents: [
                {
                    type: 'text',
                    data: JSON.stringify(executionData, null, 2),
                    mimeType: 'application/json',
                },
            ],
        };
    }
    async getMetricsResource() {
        const metrics = await this.hiveClient.getMetrics();
        return {
            contents: [
                {
                    type: 'text',
                    text: metrics,
                    mimeType: 'text/plain',
                },
            ],
        };
    }
    async getCapabilitiesResource() {
        const agents = await this.hiveClient.getAgents();
        const capabilities = {
            agent_specializations: {
                kernel_dev: {
                    description: 'GPU kernel development, HIP/CUDA optimization, memory coalescing',
                    available_agents: agents.filter(a => a.specialty === 'kernel_dev').length,
                    typical_models: ['codellama:34b', 'deepseek-coder:33b'],
                },
                pytorch_dev: {
                    description: 'PyTorch backend development, autograd, TunableOp integration',
                    available_agents: agents.filter(a => a.specialty === 'pytorch_dev').length,
                    typical_models: ['deepseek-coder:33b', 'codellama:34b'],
                },
                profiler: {
                    description: 'Performance analysis, GPU profiling, bottleneck identification',
                    available_agents: agents.filter(a => a.specialty === 'profiler').length,
                    typical_models: ['llama3:70b', 'mixtral:8x7b'],
                },
                docs_writer: {
                    description: 'Technical documentation, API docs, tutorials, examples',
                    available_agents: agents.filter(a => a.specialty === 'docs_writer').length,
                    typical_models: ['llama3:70b', 'claude-3-haiku'],
                },
                tester: {
                    description: 'Test creation, benchmarking, CI/CD, edge case handling',
                    available_agents: agents.filter(a => a.specialty === 'tester').length,
                    typical_models: ['codellama:34b', 'deepseek-coder:33b'],
                },
            },
            cluster_capacity: {
                total_agents: agents.length,
                total_concurrent_capacity: agents.reduce((sum, agent) => sum + agent.max_concurrent, 0),
                current_utilization: agents.reduce((sum, agent) => sum + agent.current_tasks, 0),
            },
            supported_frameworks: [
                'ROCm/HIP', 'PyTorch', 'CUDA', 'OpenMP', 'MPI', 'Composable Kernel'
            ],
            target_architectures: [
                'RDNA3', 'CDNA3', 'RDNA2', 'Vega', 'NVIDIA GPUs (via CUDA)'
            ],
        };
        return {
            contents: [
                {
                    type: 'text',
                    data: JSON.stringify(capabilities, null, 2),
                    mimeType: 'application/json',
                },
            ],
        };
    }
    // Helper Methods
    groupAgentsBySpecialty(agents) {
        const grouped = {};
        agents.forEach(agent => {
            if (!grouped[agent.specialty]) {
                grouped[agent.specialty] = [];
            }
            grouped[agent.specialty].push(agent);
        });
        return grouped;
    }
    formatTaskForResource(task) {
        return {
            id: task.id,
            type: task.type,
            priority: task.priority,
            status: task.status,
            assigned_agent: task.assigned_agent,
            created_at: task.created_at,
            completed_at: task.completed_at,
            objective: task.context?.objective || 'No objective specified',
        };
    }
    analyzeTaskQueue(tasks) {
        const byType = tasks.reduce((acc, task) => {
            acc[task.type] = (acc[task.type] || 0) + 1;
            return acc;
        }, {});
        const byPriority = tasks.reduce((acc, task) => {
            const priority = `priority_${task.priority}`;
            acc[priority] = (acc[priority] || 0) + 1;
            return acc;
        }, {});
        return {
            by_type: byType,
            by_priority: byPriority,
            average_priority: tasks.length > 0
                ? (tasks.reduce((sum, task) => sum + task.priority, 0) / tasks.length).toFixed(1)
                : 0,
        };
    }
    calculateTaskMetrics(tasks) {
        if (tasks.length === 0)
            return null;
        const durations = tasks
            .filter(task => task.created_at && task.completed_at)
            .map(task => new Date(task.completed_at).getTime() - new Date(task.created_at).getTime());
        if (durations.length === 0)
            return null;
        return {
            average_duration_ms: Math.round(durations.reduce((a, b) => a + b, 0) / durations.length),
            min_duration_ms: Math.min(...durations),
            max_duration_ms: Math.max(...durations),
            total_tasks_analyzed: durations.length,
        };
    }
    summarizeExecutionStatuses(executions) {
        return executions.reduce((acc, exec) => {
            acc[exec.status] = (acc[exec.status] || 0) + 1;
            return acc;
        }, {});
    }
    calculateDuration(start, end) {
        const duration = new Date(end).getTime() - new Date(start).getTime();
        const minutes = Math.floor(duration / 60000);
        const seconds = Math.floor((duration % 60000) / 1000);
        return `${minutes}m ${seconds}s`;
    }
}
//# sourceMappingURL=hive-resources.js.map
1
mcp-server/dist/hive-resources.js.map
vendored
Normal file
File diff suppressed because one or more lines are too long
27
mcp-server/dist/hive-tools.d.ts
vendored
Normal file
@@ -0,0 +1,27 @@
/**
 * Hive Tools
 *
 * Defines MCP tools that expose Hive operations to AI assistants
 */
import { Tool } from '@modelcontextprotocol/sdk/types.js';
import { HiveClient } from './hive-client.js';
export declare class HiveTools {
    private hiveClient;
    constructor(hiveClient: HiveClient);
    getAllTools(): Tool[];
    executeTool(name: string, args: Record<string, any>): Promise<any>;
    private getAgents;
    private registerAgent;
    private createTask;
    private getTask;
    private getTasks;
    private getWorkflows;
    private createWorkflow;
    private executeWorkflow;
    private getClusterStatus;
    private getMetrics;
    private getExecutions;
    private coordinateDevelopment;
    private bringHiveOnline;
}
//# sourceMappingURL=hive-tools.d.ts.map
1
mcp-server/dist/hive-tools.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"hive-tools.d.ts","sourceRoot":"","sources":["../src/hive-tools.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAE,IAAI,EAAE,MAAM,oCAAoC,CAAC;AAC1D,OAAO,EAAE,UAAU,EAAe,MAAM,kBAAkB,CAAC;AAM3D,qBAAa,SAAS;IACpB,OAAO,CAAC,UAAU,CAAa;gBAEnB,UAAU,EAAE,UAAU;IAIlC,WAAW,IAAI,IAAI,EAAE;IAmOf,WAAW,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC;YAkE1D,SAAS;YAqBT,aAAa;YAkBb,UAAU;YA6BV,OAAO;YAoBP,QAAQ;YAsBR,YAAY;YAqBZ,cAAc;YAiBd,eAAe;YAiBf,gBAAgB;YAuBhB,UAAU;YAaV,aAAa;YAsBb,qBAAqB;YAuCrB,eAAe;CAyF9B"}
590
mcp-server/dist/hive-tools.js
vendored
Normal file
@@ -0,0 +1,590 @@
/**
 * Hive Tools
 *
 * Defines MCP tools that expose Hive operations to AI assistants
 */
import { v4 as uuidv4 } from 'uuid';
import { spawn } from 'child_process';
import * as path from 'path';
export class HiveTools {
    hiveClient;
    constructor(hiveClient) {
        this.hiveClient = hiveClient;
    }
    getAllTools() {
        return [
            // Agent Management Tools
            {
                name: 'hive_get_agents',
                description: 'Get all registered AI agents in the Hive cluster with their current status',
                inputSchema: {
                    type: 'object',
                    properties: {},
                },
            },
            {
                name: 'hive_register_agent',
                description: 'Register a new AI agent in the Hive cluster',
                inputSchema: {
                    type: 'object',
                    properties: {
                        id: { type: 'string', description: 'Unique agent identifier' },
                        endpoint: { type: 'string', description: 'Agent API endpoint URL' },
                        model: { type: 'string', description: 'Model name (e.g., codellama:34b)' },
                        specialty: {
                            type: 'string',
                            enum: ['kernel_dev', 'pytorch_dev', 'profiler', 'docs_writer', 'tester'],
                            description: 'Agent specialization area'
                        },
                        max_concurrent: { type: 'number', description: 'Maximum concurrent tasks', default: 2 },
                    },
                    required: ['id', 'endpoint', 'model', 'specialty'],
                },
            },
            // Task Management Tools
            {
                name: 'hive_create_task',
                description: 'Create and assign a development task to the Hive cluster',
                inputSchema: {
                    type: 'object',
                    properties: {
                        type: {
                            type: 'string',
                            enum: ['kernel_dev', 'pytorch_dev', 'profiler', 'docs_writer', 'tester'],
                            description: 'Type of development task'
                        },
                        priority: {
                            type: 'number',
                            minimum: 1,
                            maximum: 5,
                            description: 'Task priority (1=low, 5=high)'
                        },
                        objective: { type: 'string', description: 'Main objective or goal of the task' },
                        context: {
                            type: 'object',
                            description: 'Additional context, files, constraints, requirements',
                            properties: {
                                files: { type: 'array', items: { type: 'string' }, description: 'Related file paths' },
                                constraints: { type: 'array', items: { type: 'string' }, description: 'Development constraints' },
                                requirements: { type: 'array', items: { type: 'string' }, description: 'Specific requirements' },
                                reference: { type: 'string', description: 'Reference documentation or links' }
                            }
                        },
                    },
                    required: ['type', 'priority', 'objective'],
                },
            },
            {
                name: 'hive_get_task',
                description: 'Get details and status of a specific task',
                inputSchema: {
                    type: 'object',
                    properties: {
                        task_id: { type: 'string', description: 'Task identifier' },
                    },
                    required: ['task_id'],
                },
            },
            {
                name: 'hive_get_tasks',
                description: 'Get list of tasks with optional filtering',
                inputSchema: {
                    type: 'object',
                    properties: {
                        status: {
                            type: 'string',
                            enum: ['pending', 'in_progress', 'completed', 'failed'],
                            description: 'Filter by task status'
                        },
                        agent: { type: 'string', description: 'Filter by assigned agent ID' },
                        limit: { type: 'number', description: 'Maximum number of tasks to return', default: 20 },
                    },
                },
            },
            // Workflow Management Tools
            {
                name: 'hive_get_workflows',
                description: 'Get all available workflows in the Hive platform',
                inputSchema: {
                    type: 'object',
                    properties: {},
                },
            },
            {
                name: 'hive_create_workflow',
                description: 'Create a new workflow for distributed task orchestration',
                inputSchema: {
                    type: 'object',
                    properties: {
                        name: { type: 'string', description: 'Workflow name' },
                        description: { type: 'string', description: 'Workflow description' },
                        steps: {
                            type: 'array',
                            description: 'Workflow steps in order',
                            items: {
                                type: 'object',
                                properties: {
                                    name: { type: 'string' },
                                    type: { type: 'string' },
                                    agent_type: { type: 'string' },
                                    inputs: { type: 'object' },
                                    outputs: { type: 'array', items: { type: 'string' } }
                                }
                            }
                        },
                    },
                    required: ['name', 'steps'],
                },
            },
            {
                name: 'hive_execute_workflow',
                description: 'Execute a workflow with optional input parameters',
                inputSchema: {
                    type: 'object',
                    properties: {
                        workflow_id: { type: 'string', description: 'Workflow identifier' },
                        inputs: {
                            type: 'object',
                            description: 'Input parameters for workflow execution',
                            additionalProperties: true
                        },
                    },
                    required: ['workflow_id'],
                },
            },
            // Monitoring and Status Tools
            {
                name: 'hive_get_cluster_status',
                description: 'Get comprehensive status of the entire Hive cluster',
                inputSchema: {
                    type: 'object',
                    properties: {},
                },
            },
            {
                name: 'hive_get_metrics',
                description: 'Get Prometheus metrics from the Hive cluster',
                inputSchema: {
                    type: 'object',
                    properties: {},
                },
            },
            {
                name: 'hive_get_executions',
                description: 'Get workflow execution history and status',
                inputSchema: {
                    type: 'object',
                    properties: {
                        workflow_id: { type: 'string', description: 'Filter by specific workflow ID' },
                    },
                },
            },
            // Coordination Tools
            {
                name: 'hive_coordinate_development',
                description: 'Coordinate a complex development task across multiple specialized agents',
                inputSchema: {
                    type: 'object',
                    properties: {
                        project_description: { type: 'string', description: 'Overall project or feature description' },
                        breakdown: {
                            type: 'array',
                            description: 'Task breakdown by specialization',
                            items: {
                                type: 'object',
                                properties: {
                                    specialization: { type: 'string', enum: ['kernel_dev', 'pytorch_dev', 'profiler', 'docs_writer', 'tester'] },
                                    task_description: { type: 'string' },
                                    dependencies: { type: 'array', items: { type: 'string' } },
                                    priority: { type: 'number', minimum: 1, maximum: 5 }
                                }
                            }
                        },
                        coordination_strategy: {
                            type: 'string',
                            enum: ['sequential', 'parallel', 'mixed'],
                            description: 'How to coordinate the tasks',
                            default: 'mixed'
                        },
                    },
                    required: ['project_description', 'breakdown'],
                },
            },
            // Cluster Management Tools
            {
                name: 'hive_bring_online',
                description: 'Automatically discover and register all available Ollama agents on the network, bringing the entire Hive cluster online',
                inputSchema: {
                    type: 'object',
                    properties: {
                        force_refresh: {
                            type: 'boolean',
                            description: 'Force refresh of all agents (re-register existing ones)',
                            default: false
                        },
                        subnet_scan: {
                            type: 'boolean',
                            description: 'Perform full subnet scan for discovery',
                            default: true
                        },
                    },
                },
            },
        ];
    }
    async executeTool(name, args) {
        try {
            switch (name) {
                // Agent Management
                case 'hive_get_agents':
                    return await this.getAgents();
                case 'hive_register_agent':
                    return await this.registerAgent(args);
                // Task Management
                case 'hive_create_task':
                    return await this.createTask(args);
                case 'hive_get_task':
                    return await this.getTask(args.task_id);
                case 'hive_get_tasks':
                    return await this.getTasks(args);
                // Workflow Management
                case 'hive_get_workflows':
                    return await this.getWorkflows();
                case 'hive_create_workflow':
                    return await this.createWorkflow(args);
                case 'hive_execute_workflow':
                    return await this.executeWorkflow(args.workflow_id, args.inputs);
                // Monitoring
                case 'hive_get_cluster_status':
                    return await this.getClusterStatus();
                case 'hive_get_metrics':
                    return await this.getMetrics();
                case 'hive_get_executions':
                    return await this.getExecutions(args.workflow_id);
                // Coordination
                case 'hive_coordinate_development':
                    return await this.coordinateDevelopment(args);
                // Cluster Management
                case 'hive_bring_online':
                    return await this.bringHiveOnline(args);
                default:
                    throw new Error(`Unknown tool: ${name}`);
            }
        }
        catch (error) {
            return {
                content: [
                    {
                        type: 'text',
                        text: `Error executing ${name}: ${error instanceof Error ? error.message : String(error)}`,
                    },
                ],
                isError: true,
            };
        }
    }
    // Tool Implementation Methods
    async getAgents() {
        const agents = await this.hiveClient.getAgents();
        return {
            content: [
                {
                    type: 'text',
                    text: `📋 Hive Cluster Agents (${agents.length} total):\n\n${agents.length > 0
                        ? agents.map(agent => `🤖 **${agent.id}** (${agent.specialty})\n` +
                            ` • Model: ${agent.model}\n` +
                            ` • Endpoint: ${agent.endpoint}\n` +
                            ` • Status: ${agent.status}\n` +
                            ` • Tasks: ${agent.current_tasks}/${agent.max_concurrent}\n`).join('\n')
                        : 'No agents registered yet. Use hive_register_agent to add agents to the cluster.'}`,
                },
            ],
        };
    }
    async registerAgent(args) {
        const result = await this.hiveClient.registerAgent(args);
        return {
            content: [
                {
                    type: 'text',
                    text: `✅ Successfully registered agent **${args.id}** in the Hive cluster!\n\n` +
                        `🤖 Agent Details:\n` +
                        `• ID: ${args.id}\n` +
                        `• Specialization: ${args.specialty}\n` +
                        `• Model: ${args.model}\n` +
                        `• Endpoint: ${args.endpoint}\n` +
                        `• Max Concurrent Tasks: ${args.max_concurrent || 2}`,
                },
            ],
        };
    }
    async createTask(args) {
        const taskData = {
            type: args.type,
            priority: args.priority,
            context: {
                objective: args.objective,
                ...args.context,
            },
        };
        const task = await this.hiveClient.createTask(taskData);
        return {
            content: [
                {
                    type: 'text',
                    text: `🎯 Created development task **${task.id}**\n\n` +
                        `📋 Task Details:\n` +
                        `• Type: ${task.type}\n` +
                        `• Priority: ${task.priority}/5\n` +
                        `• Status: ${task.status}\n` +
                        `• Objective: ${args.objective}\n` +
                        `• Created: ${task.created_at}\n\n` +
                        `The task has been queued and will be assigned to an available ${task.type} agent.`,
                },
            ],
        };
    }
    async getTask(taskId) {
        const task = await this.hiveClient.getTask(taskId);
        return {
            content: [
                {
                    type: 'text',
                    text: `🎯 Task **${task.id}** Details:\n\n` +
                        `• Type: ${task.type}\n` +
                        `• Priority: ${task.priority}/5\n` +
                        `• Status: ${task.status}\n` +
                        `• Assigned Agent: ${task.assigned_agent || 'Not assigned yet'}\n` +
                        `• Created: ${task.created_at}\n` +
                        `${task.completed_at ? `• Completed: ${task.completed_at}\n` : ''}` +
                        `${task.result ? `\n📊 Result:\n${JSON.stringify(task.result, null, 2)}` : ''}`,
                },
            ],
        };
    }
    async getTasks(args) {
        const tasks = await this.hiveClient.getTasks(args);
        return {
            content: [
                {
                    type: 'text',
                    text: `📋 Hive Tasks (${tasks.length} found):\n\n${tasks.length > 0
                        ? tasks.map(task => `🎯 **${task.id}** (${task.type})\n` +
                            ` • Status: ${task.status}\n` +
                            ` • Priority: ${task.priority}/5\n` +
                            ` • Agent: ${task.assigned_agent || 'Unassigned'}\n` +
                            ` • Created: ${task.created_at}\n`).join('\n')
                        : 'No tasks found matching the criteria.'}`,
                },
            ],
        };
    }
    async getWorkflows() {
        const workflows = await this.hiveClient.getWorkflows();
        return {
            content: [
                {
                    type: 'text',
                    text: `🔄 Hive Workflows (${workflows.length} total):\n\n${workflows.length > 0
                        ? workflows.map(wf => `🔄 **${wf.name || wf.id}**\n` +
                            ` • ID: ${wf.id}\n` +
                            ` • Description: ${wf.description || 'No description'}\n` +
                            ` • Status: ${wf.status || 'Unknown'}\n`).join('\n')
                        : 'No workflows created yet. Use hive_create_workflow to create distributed workflows.'}`,
                },
            ],
        };
    }
    async createWorkflow(args) {
        const result = await this.hiveClient.createWorkflow(args);
        return {
            content: [
                {
                    type: 'text',
                    text: `✅ Created workflow **${args.name}**!\n\n` +
                        `🔄 Workflow ID: ${result.workflow_id}\n` +
                        `📋 Description: ${args.description || 'No description provided'}\n` +
                        `🔧 Steps: ${args.steps.length} configured\n\n` +
                        `The workflow is ready for execution using hive_execute_workflow.`,
                },
            ],
        };
    }
    async executeWorkflow(workflowId, inputs) {
        const result = await this.hiveClient.executeWorkflow(workflowId, inputs);
        return {
            content: [
                {
                    type: 'text',
                    text: `🚀 Started workflow execution!\n\n` +
                        `🔄 Workflow ID: ${workflowId}\n` +
                        `⚡ Execution ID: ${result.execution_id}\n` +
                        `📥 Inputs: ${inputs ? JSON.stringify(inputs, null, 2) : 'None'}\n\n` +
                        `Use hive_get_executions to monitor progress.`,
                },
            ],
        };
    }
    async getClusterStatus() {
        const status = await this.hiveClient.getClusterStatus();
        return {
            content: [
                {
                    type: 'text',
                    text: `🐝 **Hive Cluster Status**\n\n` +
                        `🟢 **System**: ${status.system.status} (v${status.system.version})\n` +
                        `⏱️ **Uptime**: ${Math.floor(status.system.uptime / 3600)}h ${Math.floor((status.system.uptime % 3600) / 60)}m\n\n` +
                        `🤖 **Agents**: ${status.agents.total} total\n` +
                        ` • Available: ${status.agents.available}\n` +
                        ` • Busy: ${status.agents.busy}\n\n` +
                        `🎯 **Tasks**: ${status.tasks.total} total\n` +
                        ` • Pending: ${status.tasks.pending}\n` +
                        ` • Running: ${status.tasks.running}\n` +
                        ` • Completed: ${status.tasks.completed}\n` +
                        ` • Failed: ${status.tasks.failed}`,
                },
            ],
        };
    }
    async getMetrics() {
        const metrics = await this.hiveClient.getMetrics();
        return {
            content: [
                {
                    type: 'text',
                    text: `📊 **Hive Cluster Metrics**\n\n\`\`\`\n${metrics}\n\`\`\``,
                },
            ],
        };
    }
    async getExecutions(workflowId) {
        const executions = await this.hiveClient.getExecutions(workflowId);
        return {
            content: [
                {
                    type: 'text',
                    text: `⚡ Workflow Executions (${executions.length} found):\n\n${executions.length > 0
                        ? executions.map(exec => `⚡ **${exec.id}**\n` +
                            ` • Workflow: ${exec.workflow_id}\n` +
                            ` • Status: ${exec.status}\n` +
                            ` • Started: ${exec.started_at}\n` +
                            `${exec.completed_at ? ` • Completed: ${exec.completed_at}\n` : ''}`).join('\n')
                        : 'No executions found.'}`,
                },
            ],
        };
    }
    async coordinateDevelopment(args) {
        const { project_description, breakdown, coordination_strategy = 'mixed' } = args;
        // Create tasks for each specialization in the breakdown
        const createdTasks = [];
        for (const item of breakdown) {
            const taskData = {
                type: item.specialization,
                priority: item.priority,
                context: {
                    objective: item.task_description,
                    project_context: project_description,
                    dependencies: item.dependencies || [],
                    coordination_id: uuidv4(),
                },
            };
            const task = await this.hiveClient.createTask(taskData);
            createdTasks.push(task);
        }
        return {
            content: [
                {
                    type: 'text',
                    text: `🎯 **Development Coordination Initiated**\n\n` +
                        `📋 **Project**: ${project_description}\n` +
                        `🔄 **Strategy**: ${coordination_strategy}\n` +
                        `🎯 **Tasks Created**: ${createdTasks.length}\n\n` +
                        `**Task Breakdown:**\n${createdTasks.map(task => `• **${task.id}** (${task.type}) - Priority ${task.priority}/5`).join('\n')}\n\n` +
                        `All tasks have been queued and will be distributed to specialized agents based on availability and dependencies.`,
                },
            ],
        };
    }
    async bringHiveOnline(args) {
        const { force_refresh = false, subnet_scan = true } = args;
        try {
            // Get the path to the auto-discovery script
            const scriptPath = path.resolve('/home/tony/AI/projects/hive/scripts/auto_discover_agents.py');
            return new Promise((resolve, reject) => {
                let output = '';
                let errorOutput = '';
                // Execute the auto-discovery script
                const child = spawn('python3', [scriptPath], {
                    cwd: '/home/tony/AI/projects/hive',
                    stdio: 'pipe',
                });
                child.stdout.on('data', (data) => {
                    output += data.toString();
                });
                child.stderr.on('data', (data) => {
                    errorOutput += data.toString();
                });
                child.on('close', (code) => {
                    if (code === 0) {
                        // Parse the output to extract key information
                        const lines = output.split('\n');
                        const discoveredMatch = lines.find(l => l.includes('Discovered:'));
                        const registeredMatch = lines.find(l => l.includes('Registered:'));
                        const failedMatch = lines.find(l => l.includes('Failed:'));
                        const discovered = discoveredMatch ? discoveredMatch.split('Discovered: ')[1]?.split(' ')[0] : '0';
                        const registered = registeredMatch ? registeredMatch.split('Registered: ')[1]?.split(' ')[0] : '0';
                        const failed = failedMatch ? failedMatch.split('Failed: ')[1]?.split(' ')[0] : '0';
                        // Extract agent details from output
                        const agentLines = lines.filter(l => l.includes('•') && l.includes('models'));
                        const agentDetails = agentLines.map(line => {
                            const match = line.match(/• (.+) \((.+)\) - (\d+) models/);
                            return match ? `• **${match[1]}** (${match[2]}) - ${match[3]} models` : line;
                        });
                        resolve({
                            content: [
                                {
                                    type: 'text',
                                    text: `🐝 **Hive Cluster Online!** 🚀\n\n` +
                                        `🔍 **Auto-Discovery Complete**\n` +
                                        `• Discovered: ${discovered} agents\n` +
                                        `• Registered: ${registered} agents\n` +
                                        `• Failed: ${failed} agents\n\n` +
                                        `🤖 **Active Agents:**\n${agentDetails.join('\n')}\n\n` +
                                        `✅ **Status**: The Hive cluster is now fully operational and ready for distributed AI orchestration!\n\n` +
                                        `🎯 **Next Steps:**\n` +
                                        `• Use \`hive_get_cluster_status\` to view detailed status\n` +
                                        `• Use \`hive_coordinate_development\` to start distributed tasks\n` +
                                        `• Use \`hive_create_workflow\` to build complex workflows`,
                                },
                            ],
                        });
                    }
                    else {
                        reject(new Error(`Auto-discovery script failed with exit code ${code}. Error: ${errorOutput}`));
                    }
                });
                child.on('error', (error) => {
                    reject(new Error(`Failed to execute auto-discovery script: ${error.message}`));
                });
            });
        }
        catch (error) {
            return {
                content: [
                    {
                        type: 'text',
                        text: `❌ **Failed to bring Hive online**\n\n` +
                            `Error: ${error instanceof Error ? error.message : String(error)}\n\n` +
                            `Please ensure:\n` +
                            `• The Hive backend is running\n` +
                            `• The auto-discovery script exists at /home/tony/AI/projects/hive/scripts/auto_discover_agents.py\n` +
                            `• Python3 is available and required dependencies are installed`,
                    },
                ],
                isError: true,
            };
        }
    }
}
//# sourceMappingURL=hive-tools.js.map
1
mcp-server/dist/hive-tools.js.map
vendored
Normal file
File diff suppressed because one or more lines are too long
9
mcp-server/dist/index.d.ts
vendored
Normal file
@@ -0,0 +1,9 @@
#!/usr/bin/env node
/**
 * Hive MCP Server
 *
 * Exposes the Hive Distributed AI Orchestration Platform via Model Context Protocol (MCP)
 * Allows AI assistants like Claude to directly orchestrate distributed development tasks
 */
export {};
//# sourceMappingURL=index.d.ts.map
1
mcp-server/dist/index.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";AAEA;;;;;GAKG"}
107
mcp-server/dist/index.js
vendored
Normal file
@@ -0,0 +1,107 @@
#!/usr/bin/env node
/**
 * Hive MCP Server
 *
 * Exposes the Hive Distributed AI Orchestration Platform via Model Context Protocol (MCP)
 * Allows AI assistants like Claude to directly orchestrate distributed development tasks
 */
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
import { CallToolRequestSchema, ListResourcesRequestSchema, ListToolsRequestSchema, ReadResourceRequestSchema, } from '@modelcontextprotocol/sdk/types.js';
import { HiveClient } from './hive-client.js';
import { HiveTools } from './hive-tools.js';
import { HiveResources } from './hive-resources.js';
class HiveMCPServer {
    server;
    hiveClient;
    hiveTools;
    hiveResources;
    constructor() {
        this.server = new Server({
            name: 'hive-mcp-server',
            version: '1.0.0',
        }, {
            capabilities: {
                tools: {},
                resources: {},
            },
        });
        // Initialize Hive client and handlers
        this.hiveClient = new HiveClient();
        this.hiveTools = new HiveTools(this.hiveClient);
        this.hiveResources = new HiveResources(this.hiveClient);
        this.setupHandlers();
    }
    setupHandlers() {
        // Tools handler - exposes Hive operations as MCP tools
        this.server.setRequestHandler(ListToolsRequestSchema, async () => {
            return {
                tools: this.hiveTools.getAllTools(),
            };
        });
        this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
            const { name, arguments: args } = request.params;
            return await this.hiveTools.executeTool(name, args || {});
        });
        // Resources handler - exposes Hive cluster state as MCP resources
        this.server.setRequestHandler(ListResourcesRequestSchema, async () => {
            return {
                resources: await this.hiveResources.getAllResources(),
            };
        });
        this.server.setRequestHandler(ReadResourceRequestSchema, async (request) => {
            const { uri } = request.params;
            return await this.hiveResources.readResource(uri);
        });
        // Error handling
        this.server.onerror = (error) => {
            console.error('[MCP Server Error]:', error);
        };
        process.on('SIGINT', async () => {
            await this.server.close();
            process.exit(0);
        });
    }
    async start() {
        console.log('🐝 Starting Hive MCP Server...');
        // Test connection to Hive backend
        try {
            await this.hiveClient.testConnection();
            console.log('✅ Connected to Hive backend successfully');
        }
        catch (error) {
            console.error('❌ Failed to connect to Hive backend:', error);
            process.exit(1);
        }
        // Auto-discover and register agents on startup
        console.log('🔍 Auto-discovering agents...');
        try {
            await this.autoDiscoverAgents();
            console.log('✅ Auto-discovery completed successfully');
        }
        catch (error) {
            console.warn('⚠️ Auto-discovery failed, continuing without it:', error);
        }
        const transport = new StdioServerTransport();
        await this.server.connect(transport);
        console.log('🚀 Hive MCP Server running on stdio');
        console.log('🔗 AI assistants can now orchestrate your distributed cluster!');
    }
    async autoDiscoverAgents() {
        // Use the existing hive_bring_online functionality
        const result = await this.hiveTools.executeTool('hive_bring_online', {
            force_refresh: false,
            subnet_scan: true
        });
        if (result.isError) {
            throw new Error(`Auto-discovery failed: ${result.content[0]?.text || 'Unknown error'}`);
        }
    }
}
// Start the server
const server = new HiveMCPServer();
server.start().catch((error) => {
    console.error('Failed to start Hive MCP Server:', error);
    process.exit(1);
});
//# sourceMappingURL=index.js.map
1
mcp-server/dist/index.js.map
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";AAEA;;;;;GAKG;AAEH,OAAO,EAAE,MAAM,EAAE,MAAM,2CAA2C,CAAC;AACnE,OAAO,EAAE,oBAAoB,EAAE,MAAM,2CAA2C,CAAC;AACjF,OAAO,EACL,qBAAqB,EACrB,0BAA0B,EAC1B,sBAAsB,EACtB,yBAAyB,GAC1B,MAAM,oCAAoC,CAAC;AAC5C,OAAO,EAAE,UAAU,EAAE,MAAM,kBAAkB,CAAC;AAC9C,OAAO,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AAC5C,OAAO,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAC;AAEpD,MAAM,aAAa;IACT,MAAM,CAAS;IACf,UAAU,CAAa;IACvB,SAAS,CAAY;IACrB,aAAa,CAAgB;IAErC;QACE,IAAI,CAAC,MAAM,GAAG,IAAI,MAAM,CACtB;YACE,IAAI,EAAE,iBAAiB;YACvB,OAAO,EAAE,OAAO;SACjB,EACD;YACE,YAAY,EAAE;gBACZ,KAAK,EAAE,EAAE;gBACT,SAAS,EAAE,EAAE;aACd;SACF,CACF,CAAC;QAEF,sCAAsC;QACtC,IAAI,CAAC,UAAU,GAAG,IAAI,UAAU,EAAE,CAAC;QACnC,IAAI,CAAC,SAAS,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;QAChD,IAAI,CAAC,aAAa,GAAG,IAAI,aAAa,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;QAExD,IAAI,CAAC,aAAa,EAAE,CAAC;IACvB,CAAC;IAEO,aAAa;QACnB,uDAAuD;QACvD,IAAI,CAAC,MAAM,CAAC,iBAAiB,CAAC,sBAAsB,EAAE,KAAK,IAAI,EAAE;YAC/D,OAAO;gBACL,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,WAAW,EAAE;aACpC,CAAC;QACJ,CAAC,CAAC,CAAC;QAEH,IAAI,CAAC,MAAM,CAAC,iBAAiB,CAAC,qBAAqB,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE;YACrE,MAAM,EAAE,IAAI,EAAE,SAAS,EAAE,IAAI,EAAE,GAAG,OAAO,CAAC,MAAM,CAAC;YACjD,OAAO,MAAM,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,EAAE,IAAI,IAAI,EAAE,CAAC,CAAC;QAC5D,CAAC,CAAC,CAAC;QAEH,kEAAkE;QAClE,IAAI,CAAC,MAAM,CAAC,iBAAiB,CAAC,0BAA0B,EAAE,KAAK,IAAI,EAAE;YACnE,OAAO;gBACL,SAAS,EAAE,MAAM,IAAI,CAAC,aAAa,CAAC,eAAe,EAAE;aACtD,CAAC;QACJ,CAAC,CAAC,CAAC;QAEH,IAAI,CAAC,MAAM,CAAC,iBAAiB,CAAC,yBAAyB,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE;YACzE,MAAM,EAAE,GAAG,EAAE,GAAG,OAAO,CAAC,MAAM,CAAC;YAC/B,OAAO,MAAM,IAAI,CAAC,aAAa,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC;QACpD,CAAC,CAAC,CAAC;QAEH,iBAAiB;QACjB,IAAI,CAAC,MAAM,CAAC,OAAO,GAAG,CAAC,KAAK,EAAE,EAAE;YAC9B,OAAO,CAAC,KAAK,CAAC,qBAAqB,EAAE,KAAK,CAAC,CAAC;QAC9C,CAAC,CAAC;QAEF,OAAO,CAAC,EAAE,CAAC,QAAQ,EAAE,KAAK,IAAI,EAAE;YAC9B,MAAM,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;YAC1B,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QAClB,CAAC,CAAC,CAAC;IACL,CAAC;IAED,KAAK,CAAC,KAAK;QACT,OAAO,CAAC,GAAG,CAAC,gCAAgC,CAAC,CAAC;QAE9C,kCAAkC;QAClC,IAAI,CAAC;YACH,MAAM,IAAI,CAAC,UAAU,CAAC,cAAc,EAAE,CAAC;YACvC,OAAO,CAAC,GAAG,CAAC,0CAA0C,CAAC,CAAC;QAC1D,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,OAAO,CAAC,KAAK,CAAC,sCAAsC,EAAE,KAAK,CAAC,CAAC;YAC7D,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QAClB,CAAC;QAED,+CAA+C;QAC/C,OAAO,CAAC,GAAG,CAAC,+BAA+B,CAAC,CAAC;QAC7C,IAAI,CAAC;YACH,MAAM,IAAI,CAAC,kBAAkB,EAAE,CAAC;YAChC,OAAO,CAAC,GAAG,CAAC,yCAAyC,CAAC,CAAC;QACzD,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,OAAO,CAAC,IAAI,CAAC,mDAAmD,EAAE,KAAK,CAAC,CAAC;QAC3E,CAAC;QAED,MAAM,SAAS,GAAG,IAAI,oBAAoB,EAAE,CAAC;QAC7C,MAAM,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;QAErC,OAAO,CAAC,GAAG,CAAC,qCAAqC,CAAC,CAAC;QACnD,OAAO,CAAC,GAAG,CAAC,gEAAgE,CAAC,CAAC;IAChF,CAAC;IAEO,KAAK,CAAC,kBAAkB;QAC9B,mDAAmD;QACnD,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,mBAAmB,EAAE;YACnE,aAAa,EAAE,KAAK;YACpB,WAAW,EAAE,IAAI;SAClB,CAAC,CAAC;QAEH,IAAI,MAAM,CAAC,OAAO,EAAE,CAAC;YACnB,MAAM,IAAI,KAAK,CAAC,0BAA0B,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,IAAI,IAAI,eAAe,EAAE,CAAC,CAAC;QAC1F,CAAC;IACH,CAAC;CACF;AAED,mBAAmB;AACnB,MAAM,MAAM,GAAG,IAAI,aAAa,EAAE,CAAC;AACnC,MAAM,CAAC,KAAK,EAAE,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE;IAC7B,OAAO,CAAC,KAAK,CAAC,kCAAkC,EAAE,KAAK,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AAClB,CAAC,CAAC,CAAC"}