Major WHOOSH system refactoring and feature enhancements
- Migrated from HIVE branding to WHOOSH across all components
- Enhanced backend API with new services: AI models, BZZZ integration, templates, members
- Added comprehensive testing suite with security, performance, and integration tests
- Improved frontend with new components for project setup, AI models, and team management
- Updated MCP server implementation with WHOOSH-specific tools and resources
- Enhanced deployment configurations with production-ready Docker setups
- Added comprehensive documentation and setup guides
- Implemented age encryption service and UCXL integration

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
New binary files (compiled Python bytecode under backend/app/api/__pycache__/; binary content not shown):

BIN  backend/app/api/__pycache__/__init__.cpython-310.pyc
BIN  backend/app/api/__pycache__/__init__.cpython-312.pyc
BIN  backend/app/api/__pycache__/agents.cpython-310.pyc
BIN  backend/app/api/__pycache__/agents.cpython-312.pyc
BIN  backend/app/api/__pycache__/ai_models.cpython-310.pyc
BIN  backend/app/api/__pycache__/auth.cpython-310.pyc
BIN  backend/app/api/__pycache__/auth.cpython-312.pyc
BIN  backend/app/api/__pycache__/bzzz_integration.cpython-310.pyc
BIN  backend/app/api/__pycache__/bzzz_logs.cpython-310.pyc
BIN  backend/app/api/__pycache__/bzzz_logs.cpython-312.pyc
BIN  backend/app/api/__pycache__/cli_agents.cpython-310.pyc
BIN  backend/app/api/__pycache__/cli_agents.cpython-312.pyc
BIN  backend/app/api/__pycache__/cluster.cpython-310.pyc
BIN  backend/app/api/__pycache__/cluster.cpython-312.pyc
BIN  backend/app/api/__pycache__/cluster_registration.cpython-310.pyc
BIN  backend/app/api/__pycache__/cluster_registration.cpython-312.pyc
BIN  backend/app/api/__pycache__/cluster_setup.cpython-310.pyc
BIN  backend/app/api/__pycache__/executions.cpython-310.pyc
BIN  backend/app/api/__pycache__/executions.cpython-312.pyc
BIN  backend/app/api/__pycache__/git_repositories.cpython-310.pyc
BIN  backend/app/api/__pycache__/members.cpython-310.pyc
BIN  backend/app/api/__pycache__/members.cpython-312.pyc
BIN  backend/app/api/__pycache__/monitoring.cpython-310.pyc
BIN  backend/app/api/__pycache__/monitoring.cpython-312.pyc
BIN  backend/app/api/__pycache__/projects.cpython-310.pyc
BIN  backend/app/api/__pycache__/projects.cpython-312.pyc
BIN  backend/app/api/__pycache__/tasks.cpython-310.pyc
BIN  backend/app/api/__pycache__/tasks.cpython-312.pyc
BIN  backend/app/api/__pycache__/templates.cpython-310.pyc
BIN  backend/app/api/__pycache__/templates.cpython-312.pyc
BIN  backend/app/api/__pycache__/ucxl_integration.cpython-310.pyc
BIN  backend/app/api/__pycache__/workflows.cpython-310.pyc
BIN  backend/app/api/__pycache__/workflows.cpython-312.pyc
@@ -1,8 +1,8 @@
 """
-Hive API - Agent Management Endpoints
+WHOOSH API - Agent Management Endpoints

 This module provides comprehensive API endpoints for managing Ollama-based AI agents
-in the Hive distributed orchestration platform. It handles agent registration,
+in the WHOOSH distributed orchestration platform. It handles agent registration,
 status monitoring, and lifecycle management.

 Key Features:
@@ -42,7 +42,7 @@ logger = logging.getLogger(__name__)
     status_code=status.HTTP_200_OK,
     summary="List all registered agents",
     description="""
-    Retrieve a comprehensive list of all registered agents in the Hive cluster.
+    Retrieve a comprehensive list of all registered agents in the WHOOSH cluster.

     This endpoint returns detailed information about each agent including:
     - Agent identification and endpoint information
@@ -114,7 +114,7 @@ async def get_agents(
     status_code=status.HTTP_201_CREATED,
     summary="Register a new Ollama agent",
     description="""
-    Register a new Ollama-based AI agent with the Hive cluster.
+    Register a new Ollama-based AI agent with the WHOOSH cluster.

     This endpoint allows you to add new Ollama agents to the distributed AI network.
     The agent will be validated for connectivity and model availability before registration.
@@ -136,7 +136,7 @@ async def get_agents(
     - `reasoning`: Complex reasoning and problem-solving tasks

     **Requirements:**
-    - Agent endpoint must be accessible from the Hive cluster
+    - Agent endpoint must be accessible from the WHOOSH cluster
     - Specified model must be available on the target Ollama instance
     - Agent ID must be unique across the cluster
     """,
@@ -153,7 +153,7 @@ async def register_agent(
     current_user: Dict[str, Any] = Depends(get_current_user_context)
 ) -> AgentRegistrationResponse:
     """
-    Register a new Ollama agent in the Hive cluster.
+    Register a new Ollama agent in the WHOOSH cluster.

     Args:
         agent_data: Agent configuration and registration details
@@ -167,13 +167,13 @@ async def register_agent(
         HTTPException: If registration fails due to validation or connectivity issues
     """
     # Access coordinator through the dependency injection
-    hive_coordinator = getattr(request.app.state, 'hive_coordinator', None)
-    if not hive_coordinator:
+    whoosh_coordinator = getattr(request.app.state, 'whoosh_coordinator', None)
+    if not whoosh_coordinator:
         # Fallback to global coordinator if app state not available
         from ..main import unified_coordinator
-        hive_coordinator = unified_coordinator
+        whoosh_coordinator = unified_coordinator

-    if not hive_coordinator:
+    if not whoosh_coordinator:
         raise HTTPException(
             status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
             detail="Coordinator service unavailable"
@@ -199,7 +199,7 @@ async def register_agent(
         )

     # Add agent to coordinator
-    hive_coordinator.add_agent(agent)
+    whoosh_coordinator.add_agent(agent)

     return AgentRegistrationResponse(
         agent_id=agent.id,
@@ -303,7 +303,7 @@ async def get_agent(
     status_code=status.HTTP_204_NO_CONTENT,
     summary="Unregister an agent",
     description="""
-    Remove an agent from the Hive cluster.
+    Remove an agent from the WHOOSH cluster.

     This endpoint safely removes an agent from the cluster by:
     1. Checking for active tasks and optionally waiting for completion
@@ -337,7 +337,7 @@ async def unregister_agent(
     current_user: Dict[str, Any] = Depends(get_current_user_context)
 ):
     """
-    Unregister an agent from the Hive cluster.
+    Unregister an agent from the WHOOSH cluster.

     Args:
         agent_id: Unique identifier of the agent to remove
@@ -349,12 +349,12 @@ async def unregister_agent(
         HTTPException: If agent not found, has active tasks, or removal fails
     """
     # Access coordinator
-    hive_coordinator = getattr(request.app.state, 'hive_coordinator', None)
-    if not hive_coordinator:
+    whoosh_coordinator = getattr(request.app.state, 'whoosh_coordinator', None)
+    if not whoosh_coordinator:
         from ..main import unified_coordinator
-        hive_coordinator = unified_coordinator
+        whoosh_coordinator = unified_coordinator

-    if not hive_coordinator:
+    if not whoosh_coordinator:
         raise HTTPException(
             status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
             detail="Coordinator service unavailable"
@@ -377,7 +377,7 @@ async def unregister_agent(
         )

     # Remove from coordinator
-    hive_coordinator.remove_agent(agent_id)
+    whoosh_coordinator.remove_agent(agent_id)

     # Remove from database
     db.delete(db_agent)
@@ -406,7 +406,7 @@ async def unregister_agent(
     - Maintain their registration in the cluster

     Agents should call this endpoint every 30-60 seconds to maintain
-    their active status in the Hive cluster.
+    their active status in the WHOOSH cluster.
     """,
     responses={
         200: {"description": "Heartbeat received successfully"},
@@ -436,12 +436,12 @@ async def agent_heartbeat(
     )

     # Access coordinator
-    hive_coordinator = getattr(request.app.state, 'hive_coordinator', None)
-    if not hive_coordinator:
+    whoosh_coordinator = getattr(request.app.state, 'whoosh_coordinator', None)
+    if not whoosh_coordinator:
         from ..main import unified_coordinator
-        hive_coordinator = unified_coordinator
+        whoosh_coordinator = unified_coordinator

-    if not hive_coordinator:
+    if not whoosh_coordinator:
         raise HTTPException(
             status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
             detail="Coordinator service unavailable"
@@ -449,7 +449,7 @@ async def agent_heartbeat(

     try:
         # Update agent heartbeat timestamp
-        agent_service = hive_coordinator.agent_service
+        agent_service = whoosh_coordinator.agent_service
         if agent_service:
             agent_service.update_agent_heartbeat(agent_id)

@@ -491,7 +491,7 @@ async def agent_heartbeat(
     Register an agent automatically with capability detection.

     This endpoint is designed for Bzzz agents running as systemd services
-    to automatically register themselves with the Hive coordinator.
+    to automatically register themselves with the WHOOSH coordinator.

     Features:
     - Automatic capability detection based on available models
@@ -511,7 +511,7 @@ async def auto_register_agent(
     request: Request
 ) -> AgentRegistrationResponse:
     """
-    Automatically register a Bzzz agent with the Hive coordinator.
+    Automatically register a Bzzz agent with the WHOOSH coordinator.

     Args:
         agent_data: Agent configuration including endpoint, models, etc.
@@ -532,12 +532,12 @@ async def auto_register_agent(
     )

     # Access coordinator
-    hive_coordinator = getattr(request.app.state, 'hive_coordinator', None)
-    if not hive_coordinator:
+    whoosh_coordinator = getattr(request.app.state, 'whoosh_coordinator', None)
+    if not whoosh_coordinator:
         from ..main import unified_coordinator
-        hive_coordinator = unified_coordinator
+        whoosh_coordinator = unified_coordinator

-    if not hive_coordinator:
+    if not whoosh_coordinator:
         raise HTTPException(
             status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
             detail="Coordinator service unavailable"
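The coordinator lookup and 503 fallback above is repeated verbatim in the register, unregister, heartbeat, and auto-register handlers. A minimal sketch of how that pattern could be factored into a single FastAPI dependency is shown below; the helper name and the `..main.unified_coordinator` import path are assumptions taken from the diff, not something this commit adds.

```python
# Hypothetical consolidation of the repeated lookup (not part of this commit).
from fastapi import HTTPException, Request, status

def get_whoosh_coordinator(request: Request):
    """Resolve the WHOOSH coordinator from app state, falling back to the global instance."""
    coordinator = getattr(request.app.state, 'whoosh_coordinator', None)
    if not coordinator:
        # Fallback to the global coordinator if app state is not available (as in the diff)
        from ..main import unified_coordinator  # assumed import path
        coordinator = unified_coordinator
    if not coordinator:
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail="Coordinator service unavailable",
        )
    return coordinator
```

Endpoints could then declare `coordinator = Depends(get_whoosh_coordinator)` instead of repeating the block in each handler.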
350  backend/app/api/ai_models.py  (new file)
@@ -0,0 +1,350 @@
"""
WHOOSH AI Models API - Phase 6.1
REST API endpoints for AI model management and usage
"""

from fastapi import APIRouter, HTTPException, Depends, BackgroundTasks
from typing import List, Dict, Any, Optional
from pydantic import BaseModel
import logging

from app.services.ai_model_service import ai_model_service, ModelCapability, AIModel
from app.core.auth_deps import get_current_user
from app.models.user import User

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/api/ai-models", tags=["AI Models"])

# Request/Response Models
class CompletionRequest(BaseModel):
    prompt: str
    model_name: Optional[str] = None
    system_prompt: Optional[str] = None
    max_tokens: int = 1000
    temperature: float = 0.7
    task_type: Optional[str] = None
    context_requirements: int = 2048

class CompletionResponse(BaseModel):
    success: bool
    content: Optional[str] = None
    model: str
    response_time: Optional[float] = None
    usage_stats: Optional[Dict[str, Any]] = None
    error: Optional[str] = None

class ModelInfo(BaseModel):
    name: str
    node_url: str
    capabilities: List[str]
    context_length: int
    parameter_count: str
    specialization: Optional[str] = None
    performance_score: float
    availability: bool
    usage_count: int
    avg_response_time: float

class ClusterStatus(BaseModel):
    total_nodes: int
    healthy_nodes: int
    total_models: int
    models_by_capability: Dict[str, int]
    cluster_load: float
    model_usage_stats: Dict[str, Dict[str, Any]]

class ModelSelectionRequest(BaseModel):
    task_type: str
    context_requirements: int = 2048
    prefer_specialized: bool = True

class CodeGenerationRequest(BaseModel):
    description: str
    language: str = "python"
    context: Optional[str] = None
    style: str = "clean"  # clean, optimized, documented
    max_tokens: int = 2000

class CodeReviewRequest(BaseModel):
    code: str
    language: str
    focus_areas: List[str] = ["bugs", "performance", "security", "style"]
    severity_level: str = "medium"  # low, medium, high

@router.on_event("startup")
async def startup_ai_service():
    """Initialize AI model service on startup"""
    try:
        await ai_model_service.initialize()
        logger.info("AI Model Service initialized successfully")
    except Exception as e:
        logger.error(f"Failed to initialize AI Model Service: {e}")

@router.on_event("shutdown")
async def shutdown_ai_service():
    """Cleanup AI model service on shutdown"""
    await ai_model_service.cleanup()

@router.get("/status", response_model=ClusterStatus)
async def get_cluster_status(current_user: User = Depends(get_current_user)):
    """Get comprehensive cluster status"""
    try:
        status = await ai_model_service.get_cluster_status()
        return ClusterStatus(**status)
    except Exception as e:
        logger.error(f"Error getting cluster status: {e}")
        raise HTTPException(status_code=500, detail="Failed to get cluster status")

@router.get("/models", response_model=List[ModelInfo])
async def list_available_models(current_user: User = Depends(get_current_user)):
    """List all available AI models across the cluster"""
    try:
        models = []
        for model in ai_model_service.models.values():
            models.append(ModelInfo(
                name=model.name,
                node_url=model.node_url,
                capabilities=[cap.value for cap in model.capabilities],
                context_length=model.context_length,
                parameter_count=model.parameter_count,
                specialization=model.specialization,
                performance_score=model.performance_score,
                availability=model.availability,
                usage_count=model.usage_count,
                avg_response_time=model.avg_response_time
            ))

        return sorted(models, key=lambda x: x.name)
    except Exception as e:
        logger.error(f"Error listing models: {e}")
        raise HTTPException(status_code=500, detail="Failed to list models")

@router.post("/select-model", response_model=ModelInfo)
async def select_best_model(
    request: ModelSelectionRequest,
    current_user: User = Depends(get_current_user)
):
    """Select the best model for a specific task"""
    try:
        # Convert task_type string to enum
        try:
            task_capability = ModelCapability(request.task_type)
        except ValueError:
            raise HTTPException(
                status_code=400,
                detail=f"Invalid task type: {request.task_type}"
            )

        model = await ai_model_service.get_best_model_for_task(
            task_type=task_capability,
            context_requirements=request.context_requirements,
            prefer_specialized=request.prefer_specialized
        )

        if not model:
            raise HTTPException(
                status_code=404,
                detail="No suitable model found for the specified task"
            )

        return ModelInfo(
            name=model.name,
            node_url=model.node_url,
            capabilities=[cap.value for cap in model.capabilities],
            context_length=model.context_length,
            parameter_count=model.parameter_count,
            specialization=model.specialization,
            performance_score=model.performance_score,
            availability=model.availability,
            usage_count=model.usage_count,
            avg_response_time=model.avg_response_time
        )

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error selecting model: {e}")
        raise HTTPException(status_code=500, detail="Failed to select model")

@router.post("/generate", response_model=CompletionResponse)
async def generate_completion(
    request: CompletionRequest,
    current_user: User = Depends(get_current_user)
):
    """Generate completion using AI model"""
    try:
        model_name = request.model_name

        # Auto-select model if not specified
        if not model_name and request.task_type:
            try:
                task_capability = ModelCapability(request.task_type)
                best_model = await ai_model_service.get_best_model_for_task(
                    task_type=task_capability,
                    context_requirements=request.context_requirements
                )
                if best_model:
                    model_name = best_model.name
            except ValueError:
                pass

        if not model_name:
            # Default to first available model
            available_models = [m for m in ai_model_service.models.values() if m.availability]
            if not available_models:
                raise HTTPException(status_code=503, detail="No models available")
            model_name = available_models[0].name

        result = await ai_model_service.generate_completion(
            model_name=model_name,
            prompt=request.prompt,
            system_prompt=request.system_prompt,
            max_tokens=request.max_tokens,
            temperature=request.temperature
        )

        return CompletionResponse(**result)

    except Exception as e:
        logger.error(f"Error generating completion: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@router.post("/code/generate", response_model=CompletionResponse)
async def generate_code(
    request: CodeGenerationRequest,
    current_user: User = Depends(get_current_user)
):
    """Generate code using AI models optimized for coding"""
    try:
        # Select best coding model
        coding_model = await ai_model_service.get_best_model_for_task(
            task_type=ModelCapability.CODE_GENERATION,
            context_requirements=max(2048, len(request.description) * 4)
        )

        if not coding_model:
            raise HTTPException(status_code=503, detail="No coding models available")

        # Craft specialized prompt for code generation
        system_prompt = f"""You are an expert {request.language} programmer. Generate clean, well-documented, and efficient code.
Style preferences: {request.style}
Language: {request.language}
Focus on: best practices, readability, and maintainability."""

        prompt = f"""Generate {request.language} code for the following requirement:

Description: {request.description}

{f"Context: {request.context}" if request.context else ""}

Please provide:
1. Clean, well-structured code
2. Appropriate comments and documentation
3. Error handling where relevant
4. Following {request.language} best practices

Code:"""

        result = await ai_model_service.generate_completion(
            model_name=coding_model.name,
            prompt=prompt,
            system_prompt=system_prompt,
            max_tokens=request.max_tokens,
            temperature=0.3  # Lower temperature for more deterministic code
        )

        return CompletionResponse(**result)

    except Exception as e:
        logger.error(f"Error generating code: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@router.post("/code/review", response_model=CompletionResponse)
async def review_code(
    request: CodeReviewRequest,
    current_user: User = Depends(get_current_user)
):
    """Review code using AI models optimized for code analysis"""
    try:
        # Select best code review model
        review_model = await ai_model_service.get_best_model_for_task(
            task_type=ModelCapability.CODE_REVIEW,
            context_requirements=max(4096, len(request.code) * 2)
        )

        if not review_model:
            raise HTTPException(status_code=503, detail="No code review models available")

        # Craft specialized prompt for code review
        system_prompt = f"""You are an expert code reviewer specializing in {request.language}.
Provide constructive, actionable feedback focusing on: {', '.join(request.focus_areas)}.
Severity level: {request.severity_level}
Be specific about line numbers and provide concrete suggestions for improvement."""

        focus_description = {
            "bugs": "potential bugs and logic errors",
            "performance": "performance optimizations and efficiency",
            "security": "security vulnerabilities and best practices",
            "style": "code style, formatting, and conventions",
            "maintainability": "code maintainability and readability",
            "testing": "test coverage and testability"
        }

        focus_details = [focus_description.get(area, area) for area in request.focus_areas]

        prompt = f"""Please review this {request.language} code focusing on: {', '.join(focus_details)}

Code to review:
```{request.language}
{request.code}
```

Provide a detailed review including:
1. Overall assessment
2. Specific issues found (with line references if applicable)
3. Recommendations for improvement
4. Best practices that could be applied
5. Security considerations (if applicable)

Review:"""

        result = await ai_model_service.generate_completion(
            model_name=review_model.name,
            prompt=prompt,
            system_prompt=system_prompt,
            max_tokens=2000,
            temperature=0.5
        )

        return CompletionResponse(**result)

    except Exception as e:
        logger.error(f"Error reviewing code: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@router.post("/refresh-models")
async def refresh_model_discovery(
    background_tasks: BackgroundTasks,
    current_user: User = Depends(get_current_user)
):
    """Refresh model discovery across the cluster"""
    try:
        background_tasks.add_task(ai_model_service.discover_cluster_models)
        return {"message": "Model discovery refresh initiated"}
    except Exception as e:
        logger.error(f"Error refreshing models: {e}")
        raise HTTPException(status_code=500, detail="Failed to refresh models")

@router.get("/capabilities")
async def list_model_capabilities():
    """List all available model capabilities"""
    return {
        "capabilities": [
            {
                "name": cap.value,
                "description": cap.value.replace("_", " ").title()
            }
            for cap in ModelCapability
        ]
    }
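For reference, a minimal client call against the new `/api/ai-models/generate` route might look like the sketch below. The base URL and the bearer-token scheme accepted by `get_current_user` are assumptions; the field names come from the `CompletionRequest`/`CompletionResponse` models above.

```python
# Hedged usage sketch; base URL and auth header format are assumed.
import httpx

def generate_completion(prompt: str, token: str) -> dict:
    payload = {
        "prompt": prompt,
        "task_type": "code_generation",  # must match a ModelCapability value
        "max_tokens": 500,
        "temperature": 0.2,
    }
    resp = httpx.post(
        "http://localhost:8000/api/ai-models/generate",
        json=payload,
        headers={"Authorization": f"Bearer {token}"},
        timeout=120.0,
    )
    resp.raise_for_status()
    # CompletionResponse fields: success, content, model, response_time, usage_stats, error
    return resp.json()
```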
@@ -1,5 +1,5 @@
 """
-Authentication API endpoints for Hive platform.
+Authentication API endpoints for WHOOSH platform.
 Handles user registration, login, token refresh, and API key management.
 """

@@ -95,12 +95,12 @@ async def auto_discover_agents(
         AutoDiscoveryResponse: Discovery results and registration status
     """
     # Access coordinator
-    hive_coordinator = getattr(request.app.state, 'hive_coordinator', None)
-    if not hive_coordinator:
+    whoosh_coordinator = getattr(request.app.state, 'whoosh_coordinator', None)
+    if not whoosh_coordinator:
         from ..main import unified_coordinator
-        hive_coordinator = unified_coordinator
+        whoosh_coordinator = unified_coordinator

-    if not hive_coordinator:
+    if not whoosh_coordinator:
         raise HTTPException(
             status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
             detail="Coordinator service unavailable"
@@ -184,7 +184,7 @@ async def auto_discover_agents(
             )

             # Add to coordinator
-            hive_coordinator.add_agent(agent)
+            whoosh_coordinator.add_agent(agent)
             registered_agents.append(agent_id)

         except Exception as e:
266  backend/app/api/bzzz_integration.py  (new file)
@@ -0,0 +1,266 @@
#!/usr/bin/env python3
"""
BZZZ Integration API for WHOOSH
API endpoints for team collaboration, decision publishing, and consensus mechanisms
"""

from fastapi import APIRouter, HTTPException, Depends, Query
from typing import Dict, List, Optional, Any
from pydantic import BaseModel, Field
from datetime import datetime

from ..services.bzzz_integration_service import bzzz_service, AgentRole
from ..core.auth_deps import get_current_user
from ..models.user import User

router = APIRouter(prefix="/api/bzzz", tags=["BZZZ Integration"])

# Pydantic models for API requests/responses

class DecisionRequest(BaseModel):
    title: str = Field(..., description="Decision title")
    description: str = Field(..., description="Detailed decision description")
    context: Dict[str, Any] = Field(default_factory=dict, description="Decision context data")
    ucxl_address: Optional[str] = Field(None, description="Related UCXL address")

class DecisionResponse(BaseModel):
    decision_id: str
    title: str
    description: str
    author_role: str
    timestamp: datetime
    ucxl_address: Optional[str] = None

class TaskAssignmentRequest(BaseModel):
    task_description: str = Field(..., description="Task description")
    required_capabilities: List[str] = Field(..., description="Required capabilities")
    priority: str = Field("medium", description="Task priority (low, medium, high, urgent)")

class TaskAssignmentResponse(BaseModel):
    decision_id: Optional[str]
    assigned_to: str
    assignment_score: float
    alternatives: List[Dict[str, Any]]

class TeamMemberInfo(BaseModel):
    agent_id: str
    role: str
    endpoint: str
    capabilities: List[str]
    status: str

class TeamStatusResponse(BaseModel):
    total_members: int
    online_members: int
    offline_members: int
    role_distribution: Dict[str, int]
    active_decisions: int
    recent_decisions: List[Dict[str, Any]]
    network_health: float

class ConsensusResponse(BaseModel):
    decision_id: str
    total_votes: int
    approvals: int
    approval_rate: float
    consensus_reached: bool
    details: Dict[str, Any]

@router.get("/status", response_model=TeamStatusResponse)
async def get_team_status(
    current_user: User = Depends(get_current_user)
) -> TeamStatusResponse:
    """Get current BZZZ team status and network health"""
    try:
        status = await bzzz_service.get_team_status()
        return TeamStatusResponse(**status)
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get team status: {str(e)}")

@router.get("/members", response_model=List[TeamMemberInfo])
async def get_team_members(
    current_user: User = Depends(get_current_user)
) -> List[TeamMemberInfo]:
    """Get list of active team members in BZZZ network"""
    try:
        members = []
        for member in bzzz_service.team_members.values():
            members.append(TeamMemberInfo(
                agent_id=member.agent_id,
                role=member.role.value,
                endpoint=member.endpoint,
                capabilities=member.capabilities,
                status=member.status
            ))
        return members
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get team members: {str(e)}")

@router.post("/decisions", response_model=Dict[str, str])
async def publish_decision(
    decision: DecisionRequest,
    current_user: User = Depends(get_current_user)
) -> Dict[str, str]:
    """
    Publish a decision to the BZZZ network for team consensus
    """
    try:
        decision_id = await bzzz_service.publish_decision(
            title=decision.title,
            description=decision.description,
            context=decision.context,
            ucxl_address=decision.ucxl_address
        )

        if decision_id:
            return {"decision_id": decision_id, "status": "published"}
        else:
            raise HTTPException(status_code=500, detail="Failed to publish decision")

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to publish decision: {str(e)}")

@router.get("/decisions", response_model=List[DecisionResponse])
async def get_recent_decisions(
    limit: int = Query(10, ge=1, le=100),
    current_user: User = Depends(get_current_user)
) -> List[DecisionResponse]:
    """Get recent decisions from BZZZ network"""
    try:
        decisions = sorted(
            bzzz_service.active_decisions.values(),
            key=lambda d: d.timestamp,
            reverse=True
        )[:limit]

        return [
            DecisionResponse(
                decision_id=decision.id,
                title=decision.title,
                description=decision.description,
                author_role=decision.author_role,
                timestamp=decision.timestamp,
                ucxl_address=decision.ucxl_address
            )
            for decision in decisions
        ]
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get decisions: {str(e)}")

@router.get("/decisions/{decision_id}/consensus", response_model=Optional[ConsensusResponse])
async def get_decision_consensus(
    decision_id: str,
    current_user: User = Depends(get_current_user)
) -> Optional[ConsensusResponse]:
    """Get consensus status for a specific decision"""
    try:
        consensus = await bzzz_service.get_team_consensus(decision_id)

        if consensus:
            return ConsensusResponse(**consensus)
        else:
            raise HTTPException(status_code=404, detail="Decision not found or no consensus data available")

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get consensus: {str(e)}")

@router.post("/tasks/assign", response_model=TaskAssignmentResponse)
async def coordinate_task_assignment(
    task: TaskAssignmentRequest,
    current_user: User = Depends(get_current_user)
) -> TaskAssignmentResponse:
    """
    Coordinate task assignment across team members based on capabilities and availability
    """
    try:
        assignment = await bzzz_service.coordinate_task_assignment(
            task_description=task.task_description,
            required_capabilities=task.required_capabilities,
            priority=task.priority
        )

        if assignment:
            return TaskAssignmentResponse(**assignment)
        else:
            raise HTTPException(status_code=404, detail="No suitable team members found for task")

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to coordinate task assignment: {str(e)}")

@router.post("/network/discover")
async def rediscover_network(
    current_user: User = Depends(get_current_user)
) -> Dict[str, Any]:
    """Manually trigger team member discovery"""
    try:
        await bzzz_service._discover_team_members()

        return {
            "status": "success",
            "members_discovered": len(bzzz_service.team_members),
            "timestamp": datetime.utcnow().isoformat()
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to rediscover network: {str(e)}")

@router.get("/roles", response_model=List[str])
async def get_available_roles() -> List[str]:
    """Get list of available agent roles in BZZZ system"""
    return [role.value for role in AgentRole]

@router.get("/capabilities/{agent_id}", response_model=Dict[str, Any])
async def get_agent_capabilities(
    agent_id: str,
    current_user: User = Depends(get_current_user)
) -> Dict[str, Any]:
    """Get detailed capabilities of a specific team member"""
    try:
        if agent_id not in bzzz_service.team_members:
            raise HTTPException(status_code=404, detail=f"Agent {agent_id} not found")

        member = bzzz_service.team_members[agent_id]

        return {
            "agent_id": member.agent_id,
            "role": member.role.value,
            "capabilities": member.capabilities,
            "status": member.status,
            "endpoint": member.endpoint,
            "last_seen": datetime.utcnow().isoformat()  # Placeholder
        }
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get agent capabilities: {str(e)}")

@router.get("/health")
async def bzzz_health_check() -> Dict[str, Any]:
    """BZZZ integration health check endpoint"""
    try:
        total_members = len(bzzz_service.team_members)
        online_members = sum(1 for m in bzzz_service.team_members.values() if m.status == "online")

        health_status = "healthy" if online_members >= total_members * 0.5 else "degraded"
        if online_members == 0:
            health_status = "offline"

        return {
            "status": health_status,
            "bzzz_endpoints": len(bzzz_service.bzzz_endpoints),
            "team_members": total_members,
            "online_members": online_members,
            "active_decisions": len(bzzz_service.active_decisions),
            "timestamp": datetime.utcnow().isoformat()
        }
    except Exception as e:
        return {
            "status": "error",
            "error": str(e),
            "timestamp": datetime.utcnow().isoformat()
        }

# Note: Exception handlers are registered at the app level, not router level
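A hedged example of driving the decision workflow from a client: publish a decision, then read back its consensus. The base URL and bearer auth are assumptions; the paths and payload fields follow the router above.

```python
# Usage sketch for the BZZZ endpoints; localhost base URL and bearer auth are assumed.
import httpx

BASE = "http://localhost:8000/api/bzzz"

def publish_and_check(token: str) -> dict:
    headers = {"Authorization": f"Bearer {token}"}
    decision = {
        "title": "Adopt WHOOSH naming in service configs",      # illustrative content
        "description": "Rename remaining hive_* settings to whoosh_*",
        "context": {"component": "backend"},
        "ucxl_address": None,
    }
    created = httpx.post(f"{BASE}/decisions", json=decision, headers=headers).json()
    decision_id = created["decision_id"]
    # ConsensusResponse: total_votes, approvals, approval_rate, consensus_reached, details
    return httpx.get(f"{BASE}/decisions/{decision_id}/consensus", headers=headers).json()
```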
@@ -111,7 +111,7 @@ class BzzzLogStreamer:
         self.last_indices = {}  # Track last seen index per agent

     async def discover_bzzz_agents(self) -> List[Dict[str, str]]:
-        """Discover active Bzzz agents from the Hive agents API"""
+        """Discover active Bzzz agents from the WHOOSH agents API"""
         try:
             # This would typically query the actual agents database
             # For now, return known endpoints based on cluster nodes
@@ -1,8 +1,8 @@
 """
-Hive API - CLI Agent Management Endpoints
+WHOOSH API - CLI Agent Management Endpoints

 This module provides comprehensive API endpoints for managing CLI-based AI agents
-in the Hive distributed orchestration platform. CLI agents enable integration with
+in the WHOOSH distributed orchestration platform. CLI agents enable integration with
 cloud-based AI services and external tools through command-line interfaces.

 Key Features:
@@ -34,7 +34,7 @@ from ..core.error_handlers import (
     agent_not_found_error,
     agent_already_exists_error,
     validation_error,
-    HiveAPIException
+    WHOOSHAPIException
 )
 from ..core.auth_deps import get_current_user_context

@@ -47,9 +47,9 @@ router = APIRouter(prefix="/api/cli-agents", tags=["cli-agents"])
     status_code=status.HTTP_200_OK,
     summary="List all CLI agents",
     description="""
-    Retrieve a comprehensive list of all CLI-based agents in the Hive cluster.
+    Retrieve a comprehensive list of all CLI-based agents in the WHOOSH cluster.

-    CLI agents are cloud-based or remote AI agents that integrate with Hive through
+    CLI agents are cloud-based or remote AI agents that integrate with WHOOSH through
     command-line interfaces, providing access to advanced AI models and services.

     **CLI Agent Information Includes:**
@@ -188,10 +188,10 @@ async def get_cli_agents(
     status_code=status.HTTP_201_CREATED,
     summary="Register a new CLI agent",
     description="""
-    Register a new CLI-based AI agent with the Hive cluster.
+    Register a new CLI-based AI agent with the WHOOSH cluster.

     This endpoint enables integration of cloud-based AI services and remote tools
-    through command-line interfaces, expanding Hive's AI capabilities beyond local models.
+    through command-line interfaces, expanding WHOOSH's AI capabilities beyond local models.

     **CLI Agent Registration Process:**
     1. **Connectivity Validation**: Test SSH/CLI connection to target host
@@ -304,7 +304,7 @@ async def register_cli_agent(
             "warning": "Connectivity test failed - registering anyway for development"
         }

-    # Map specialization to Hive AgentType
+    # Map specialization to WHOOSH AgentType
     specialization_mapping = {
         "general_ai": AgentType.GENERAL_AI,
         "reasoning": AgentType.REASONING,
@@ -314,14 +314,14 @@ async def register_cli_agent(
         "cli_gemini": AgentType.CLI_GEMINI
     }

-    hive_specialty = specialization_mapping.get(agent_data.specialization, AgentType.GENERAL_AI)
+    whoosh_specialty = specialization_mapping.get(agent_data.specialization, AgentType.GENERAL_AI)

-    # Create Hive Agent object
-    hive_agent = Agent(
+    # Create WHOOSH Agent object
+    whoosh_agent = Agent(
         id=agent_data.id,
         endpoint=f"cli://{agent_data.host}",
         model=agent_data.model,
-        specialty=hive_specialty,
+        specialty=whoosh_specialty,
         max_concurrent=agent_data.max_concurrent,
         current_tasks=0,
         agent_type="cli",
@@ -330,16 +330,16 @@ async def register_cli_agent(

     # Store in database
     db_agent = ORMAgent(
-        id=hive_agent.id,
+        id=whoosh_agent.id,
         name=f"{agent_data.host}-{agent_data.agent_type}",
-        endpoint=hive_agent.endpoint,
-        model=hive_agent.model,
-        specialty=hive_agent.specialty.value,
-        specialization=hive_agent.specialty.value,
-        max_concurrent=hive_agent.max_concurrent,
-        current_tasks=hive_agent.current_tasks,
-        agent_type=hive_agent.agent_type,
-        cli_config=hive_agent.cli_config
+        endpoint=whoosh_agent.endpoint,
+        model=whoosh_agent.model,
+        specialty=whoosh_agent.specialty.value,
+        specialization=whoosh_agent.specialty.value,
+        max_concurrent=whoosh_agent.max_concurrent,
+        current_tasks=whoosh_agent.current_tasks,
+        agent_type=whoosh_agent.agent_type,
+        cli_config=whoosh_agent.cli_config
     )

     db.add(db_agent)
@@ -351,7 +351,7 @@ async def register_cli_agent(

     return CliAgentRegistrationResponse(
         agent_id=agent_data.id,
-        endpoint=hive_agent.endpoint,
+        endpoint=whoosh_agent.endpoint,
         health_check=health,
         message=f"CLI agent '{agent_data.id}' registered successfully on host '{agent_data.host}'"
     )
@@ -371,10 +371,10 @@ async def register_cli_agent(
     status_code=status.HTTP_201_CREATED,
     summary="Register predefined CLI agents",
     description="""
-    Register a set of predefined CLI agents for common Hive cluster configurations.
+    Register a set of predefined CLI agents for common WHOOSH cluster configurations.

     This endpoint provides a convenient way to quickly set up standard CLI agents
-    for typical Hive deployments, including common host configurations.
+    for typical WHOOSH deployments, including common host configurations.

     **Predefined Agent Sets:**
     - **Standard Gemini**: walnut-gemini and ironwood-gemini agents
@@ -622,7 +622,7 @@ async def health_check_cli_agent(
     status_code=status.HTTP_204_NO_CONTENT,
     summary="Unregister a CLI agent",
     description="""
-    Unregister and remove a CLI agent from the Hive cluster.
+    Unregister and remove a CLI agent from the WHOOSH cluster.

     This endpoint safely removes a CLI agent by stopping active tasks,
     cleaning up resources, and removing configuration data.
@@ -661,7 +661,7 @@ async def unregister_cli_agent(
     current_user: Dict[str, Any] = Depends(get_current_user_context)
 ):
     """
-    Unregister a CLI agent from the Hive cluster.
+    Unregister a CLI agent from the WHOOSH cluster.

     Args:
         agent_id: Unique identifier of the CLI agent to unregister
@@ -684,7 +684,7 @@ async def unregister_cli_agent(
     try:
         # Check for active tasks unless forced
         if not force and db_agent.current_tasks > 0:
-            raise HiveAPIException(
+            raise WHOOSHAPIException(
                 status_code=status.HTTP_409_CONFLICT,
                 detail=f"CLI agent '{agent_id}' has {db_agent.current_tasks} active tasks. Use force=true to override.",
                 error_code="AGENT_HAS_ACTIVE_TASKS",
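For completeness, a sketch of registering a CLI agent against the `/api/cli-agents` router. Only the fields visible in the hunks above (`id`, `host`, `model`, `specialization`, `agent_type`, `max_concurrent`) are used; the exact registration sub-path, the model name, and any additional required fields are assumptions.

```python
# Hypothetical registration call; route sub-path and full request schema are assumptions.
import httpx

def register_cli_agent(token: str) -> dict:
    agent = {
        "id": "walnut-gemini",           # predefined agent name mentioned in the docs above
        "host": "walnut",
        "model": "gemini-cli",           # illustrative model identifier
        "specialization": "cli_gemini",  # maps to AgentType.CLI_GEMINI in the diff
        "agent_type": "gemini",
        "max_concurrent": 2,
    }
    resp = httpx.post(
        "http://localhost:8000/api/cli-agents/",
        json=agent,
        headers={"Authorization": f"Bearer {token}"},
    )
    resp.raise_for_status()
    # CliAgentRegistrationResponse: agent_id, endpoint (cli://<host>), health_check, message
    return resp.json()
```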
@@ -1,6 +1,6 @@
 """
 Cluster Registration API endpoints
-Handles registration-based cluster management for Hive-Bzzz integration.
+Handles registration-based cluster management for WHOOSH-Bzzz integration.
 """
 from fastapi import APIRouter, HTTPException, Request, Depends
 from pydantic import BaseModel, Field
@@ -18,7 +18,7 @@ logger = logging.getLogger(__name__)
 router = APIRouter()

 # Initialize service
-DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://hive:hivepass@localhost:5432/hive")
+DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://whoosh:whooshpass@localhost:5432/whoosh")
 cluster_registration_service = ClusterRegistrationService(DATABASE_URL)

 # Pydantic models for API
@@ -76,7 +76,7 @@ async def register_node(
     """
     Register a new node in the cluster.

-    This endpoint allows Bzzz clients to register themselves with the Hive coordinator
+    This endpoint allows Bzzz clients to register themselves with the WHOOSH coordinator
     using a valid cluster token. Similar to `docker swarm join`.
     """
     try:
237  backend/app/api/cluster_setup.py  (new file)
@@ -0,0 +1,237 @@
#!/usr/bin/env python3
"""
Cluster Setup API Endpoints for WHOOSH
Provides REST API for cluster infrastructure setup and BZZZ deployment
"""

import logging
from typing import Dict, List, Any, Optional
from fastapi import APIRouter, HTTPException, Depends, BackgroundTasks
from pydantic import BaseModel, Field

from ..services.cluster_setup_service import cluster_setup_service

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/cluster-setup", tags=["cluster-setup"])

# Request/Response Models
class NodeConfiguration(BaseModel):
    hostname: str = Field(..., description="Node hostname")
    ip_address: str = Field(..., description="Node IP address")
    ssh_user: str = Field(..., description="SSH username")
    ssh_port: int = Field(default=22, description="SSH port")
    ssh_key_path: Optional[str] = Field(None, description="Path to SSH private key")
    ssh_password: Optional[str] = Field(None, description="SSH password (if not using keys)")
    role: str = Field(default="worker", description="Node role: coordinator, worker, storage")

class InfrastructureConfigRequest(BaseModel):
    nodes: List[NodeConfiguration] = Field(..., description="List of cluster nodes")

class ModelSelectionRequest(BaseModel):
    model_names: List[str] = Field(..., description="List of selected model names")

class AgentDeploymentRequest(BaseModel):
    coordinator_hostname: str = Field(..., description="Hostname of coordinator node")

# API Endpoints

@router.get("/status")
async def get_setup_status() -> Dict[str, Any]:
    """Get current cluster setup status and progress"""
    try:
        logger.info("🔍 Getting cluster setup status")

        status = await cluster_setup_service.get_setup_status()

        logger.info(f"📊 Cluster setup status: {status['next_step']}")
        return {
            "success": True,
            "data": status
        }

    except Exception as e:
        logger.error(f"❌ Error getting setup status: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@router.get("/models/available")
async def get_available_models() -> Dict[str, Any]:
    """Get list of available models from ollama.com registry"""
    try:
        logger.info("📋 Fetching available models from registry")

        models = await cluster_setup_service.fetch_ollama_models()

        return {
            "success": True,
            "data": {
                "models": models,
                "count": len(models)
            }
        }

    except Exception as e:
        logger.error(f"❌ Error fetching available models: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@router.post("/infrastructure/configure")
async def configure_infrastructure(request: InfrastructureConfigRequest) -> Dict[str, Any]:
    """Configure cluster infrastructure with node connectivity testing"""
    try:
        logger.info(f"🏗️ Configuring infrastructure with {len(request.nodes)} nodes")

        # Convert Pydantic models to dicts
        nodes_data = [node.model_dump() for node in request.nodes]

        result = await cluster_setup_service.configure_infrastructure(nodes_data)

        if result["success"]:
            logger.info(f"✅ Infrastructure configured: {result['nodes_accessible']}/{result['nodes_configured']} nodes accessible")
        else:
            logger.error(f"❌ Infrastructure configuration failed: {result.get('error')}")

        return {
            "success": result["success"],
            "data": result
        }

    except Exception as e:
        logger.error(f"❌ Error configuring infrastructure: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@router.post("/keys/generate")
async def generate_age_keys() -> Dict[str, Any]:
    """Generate Age encryption keys for secure P2P communication"""
    try:
        logger.info("🔐 Generating Age encryption keys")

        result = await cluster_setup_service.generate_age_keys()

        if result["success"]:
            logger.info("✅ Age keys generated successfully")
        else:
            logger.error(f"❌ Age key generation failed: {result.get('error')}")

        return {
            "success": result["success"],
            "data": result
        }

    except Exception as e:
        logger.error(f"❌ Error generating age keys: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@router.post("/models/select")
async def select_models(request: ModelSelectionRequest) -> Dict[str, Any]:
    """Select models for cluster deployment"""
    try:
        logger.info(f"📦 Selecting {len(request.model_names)} models for cluster")

        result = await cluster_setup_service.select_models(request.model_names)

        if result["success"]:
            logger.info(f"✅ Models selected: {request.model_names}")
        else:
            logger.error(f"❌ Model selection failed: {result.get('error')}")

        return {
            "success": result["success"],
            "data": result
        }

    except Exception as e:
        logger.error(f"❌ Error selecting models: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@router.post("/agent/deploy-first")
async def deploy_first_agent(
    request: AgentDeploymentRequest,
    background_tasks: BackgroundTasks
) -> Dict[str, Any]:
    """Deploy the first BZZZ agent and pull selected models"""
    try:
        logger.info(f"🚀 Deploying first BZZZ agent to {request.coordinator_hostname}")

        # This can take a long time, so we could optionally run it in background
        result = await cluster_setup_service.deploy_first_agent(request.coordinator_hostname)

        if result["success"]:
            logger.info(f"✅ First agent deployed successfully to {request.coordinator_hostname}")
        else:
            logger.error(f"❌ First agent deployment failed: {result.get('error')}")

        return {
            "success": result["success"],
            "data": result
        }

    except Exception as e:
        logger.error(f"❌ Error deploying first agent: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@router.post("/cluster/initialize")
async def initialize_cluster(background_tasks: BackgroundTasks) -> Dict[str, Any]:
    """Initialize the complete cluster with P2P model distribution"""
    try:
        logger.info("🌐 Initializing complete cluster")

        # This definitely takes a long time, consider background task
        result = await cluster_setup_service.initialize_cluster()

        if result["success"]:
            logger.info(f"✅ Cluster initialized: {result['successful_deployments']}/{result['cluster_nodes']} nodes")
        else:
            logger.error(f"❌ Cluster initialization failed: {result.get('error')}")

        return {
            "success": result["success"],
            "data": result
        }

    except Exception as e:
        logger.error(f"❌ Error initializing cluster: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@router.post("/reset")
async def reset_setup() -> Dict[str, Any]:
    """Reset cluster setup state (for development/testing)"""
    try:
        logger.info("🔄 Resetting cluster setup state")

        # Reset the setup service state
        cluster_setup_service.setup_state = cluster_setup_service.__class__.ClusterSetupState()

        logger.info("✅ Cluster setup state reset")
        return {
            "success": True,
            "message": "Cluster setup state has been reset"
        }

    except Exception as e:
        logger.error(f"❌ Error resetting setup: {e}")
        raise HTTPException(status_code=500, detail=str(e))

# Health check for the setup service
@router.get("/health")
async def health_check() -> Dict[str, Any]:
    """Health check for cluster setup service"""
    try:
        # Initialize if not already done
        if not hasattr(cluster_setup_service, 'session') or cluster_setup_service.session is None:
            await cluster_setup_service.initialize()

        return {
            "success": True,
            "service": "cluster_setup",
            "status": "healthy",
            "initialized": cluster_setup_service.session is not None
        }

    except Exception as e:
        logger.error(f"❌ Health check failed: {e}")
        return {
            "success": False,
            "service": "cluster_setup",
            "status": "unhealthy",
            "error": str(e)
        }
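The setup flow above is meant to be driven step by step. A hedged sketch of the first two steps (check status, then configure nodes) follows; the base URL and whether the router is mounted under an additional prefix are assumptions, while the node fields mirror `NodeConfiguration`.

```python
# Usage sketch for the cluster setup endpoints; base URL and mount point are assumed.
import httpx

BASE = "http://localhost:8000/cluster-setup"

def run_initial_setup() -> dict:
    # Step 1: read current setup progress; the service returns next_step inside data
    status = httpx.get(f"{BASE}/status").json()
    print("next step:", status["data"]["next_step"])

    # Step 2: register the first node; hostname/IP/user values are illustrative
    nodes = [{
        "hostname": "walnut",
        "ip_address": "192.168.1.10",
        "ssh_user": "ops",
        "ssh_port": 22,
        "ssh_key_path": "~/.ssh/id_ed25519",
        "role": "coordinator",
    }]
    configured = httpx.post(f"{BASE}/infrastructure/configure", json={"nodes": nodes}).json()
    return configured
```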
@@ -430,10 +430,10 @@ async def send_feedback_to_rl_curator(
         "bzzz_type": "feedback_event",
         "timestamp": datetime.utcnow().isoformat(),
         "origin": {
-            "node_id": "hive",
+            "node_id": "whoosh",
             "agent_id": agent_id,
-            "task_id": f"hive-feedback-{feedback_id}",
-            "workspace": "hive://context-feedback",
+            "task_id": f"whoosh-feedback-{feedback_id}",
+            "workspace": "whoosh://context-feedback",
             "directory": "/feedback/"
         },
         "feedback": {
@@ -441,9 +441,9 @@ async def send_feedback_to_rl_curator(
             "category": "general",  # Could be enhanced with category detection
             "role": role,
             "context_id": context_id,
-            "reason": f"Feedback from Hive agent {agent_id}",
+            "reason": f"Feedback from WHOOSH agent {agent_id}",
             "confidence": confidence,
-            "usage_context": "hive_platform"
+            "usage_context": "whoosh_platform"
         },
         "task_outcome": {
             "completed": feedback_type in ["upvote", "task_success"],
319
backend/app/api/git_repositories.py
Normal file
319
backend/app/api/git_repositories.py
Normal file
@@ -0,0 +1,319 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Git Repositories API Endpoints for WHOOSH
|
||||
Provides REST API for git repository management and integration
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Dict, List, Any, Optional
|
||||
from fastapi import APIRouter, HTTPException, Query, Depends
|
||||
from pydantic import BaseModel, Field, field_validator
|
||||
|
||||
from ..services.git_repository_service import git_repository_service
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/git-repositories", tags=["git-repositories"])
|
||||
|
||||
# Request/Response Models
|
||||
class GitCredentialsRequest(BaseModel):
|
||||
username: Optional[str] = Field(None, description="Git username")
|
||||
password: Optional[str] = Field(None, description="Git password or token")
|
||||
ssh_key_content: Optional[str] = Field(None, description="SSH private key content")
|
||||
ssh_key_path: Optional[str] = Field(None, description="Path to SSH private key file")
|
||||
auth_type: str = Field(default="https", description="Authentication type: https, ssh, token")
|
||||
|
||||
@field_validator('auth_type')
|
||||
@classmethod
|
||||
def validate_auth_type(cls, v):
|
||||
if v not in ['https', 'ssh', 'token']:
|
||||
raise ValueError('auth_type must be one of: https, ssh, token')
|
||||
return v
|
||||
|
||||
class AddRepositoryRequest(BaseModel):
|
||||
name: str = Field(..., description="Repository display name")
|
||||
url: str = Field(..., description="Git repository URL")
|
||||
credentials: GitCredentialsRequest = Field(..., description="Git authentication credentials")
|
||||
project_id: Optional[str] = Field(None, description="Associated project ID")
|
||||
|
||||
@field_validator('url')
|
||||
@classmethod
|
||||
def validate_url(cls, v):
|
||||
if not v.startswith(('http://', 'https://', 'git@', 'ssh://')):
|
||||
raise ValueError('URL must be a valid git repository URL')
|
||||
return v
|
||||
|
||||
class UpdateCredentialsRequest(BaseModel):
|
||||
credentials: GitCredentialsRequest = Field(..., description="Updated git credentials")
|
||||
|
||||
# API Endpoints
|
||||
|
||||
@router.get("/")
|
||||
async def list_repositories(
|
||||
project_id: Optional[str] = Query(None, description="Filter by project ID")
|
||||
) -> Dict[str, Any]:
|
||||
"""Get list of all git repositories, optionally filtered by project"""
|
||||
try:
|
||||
logger.info(f"📂 Listing repositories (project_id: {project_id})")
|
||||
|
||||
repositories = await git_repository_service.get_repositories(project_id)
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"data": {
|
||||
"repositories": repositories,
|
||||
"count": len(repositories)
|
||||
}
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error listing repositories: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.post("/")
|
||||
async def add_repository(request: AddRepositoryRequest) -> Dict[str, Any]:
|
||||
"""Add a new git repository with credentials"""
|
||||
try:
|
||||
logger.info(f"📥 Adding repository: {request.name}")
|
||||
|
||||
# Convert credentials to dict
|
||||
credentials_dict = request.credentials.dict()
|
||||
|
||||
result = await git_repository_service.add_repository(
|
||||
name=request.name,
|
||||
url=request.url,
|
||||
credentials=credentials_dict,
|
||||
project_id=request.project_id
|
||||
)
|
||||
|
||||
if result["success"]:
|
||||
logger.info(f"✅ Repository {request.name} added successfully")
|
||||
else:
|
||||
logger.error(f"❌ Failed to add repository {request.name}: {result.get('error')}")
|
||||
|
||||
return {
|
||||
"success": result["success"],
|
||||
"data": result
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error adding repository: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
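# Illustrative client call (editorial addition, not part of this commit): adding a
# repository through this endpoint with httpx. The base URL, repository URL, and
# token are placeholders; the payload mirrors AddRepositoryRequest above.
#
#   import httpx
#
#   payload = {
#       "name": "example-repo",
#       "url": "https://git.example.com/org/example-repo.git",
#       "credentials": {"auth_type": "token", "password": "<personal-access-token>"},
#       "project_id": "example-project",
#   }
#   response = httpx.post("http://localhost:8000/git-repositories/", json=payload)
#   response.raise_for_status()
#   print(response.json()["data"])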
|
||||
|
||||
@router.get("/{repo_id}")
|
||||
async def get_repository(repo_id: str) -> Dict[str, Any]:
|
||||
"""Get details of a specific repository"""
|
||||
try:
|
||||
logger.info(f"🔍 Getting repository: {repo_id}")
|
||||
|
||||
repository = await git_repository_service.get_repository(repo_id)
|
||||
|
||||
if not repository:
|
||||
raise HTTPException(status_code=404, detail="Repository not found")
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"data": repository
|
||||
}
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error getting repository {repo_id}: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.put("/{repo_id}/credentials")
|
||||
async def update_credentials(
|
||||
repo_id: str,
|
||||
request: UpdateCredentialsRequest
|
||||
) -> Dict[str, Any]:
|
||||
"""Update git credentials for a repository"""
|
||||
try:
|
||||
logger.info(f"🔐 Updating credentials for repository: {repo_id}")
|
||||
|
||||
# Check if repository exists
|
||||
repo = await git_repository_service.get_repository(repo_id)
|
||||
if not repo:
|
||||
raise HTTPException(status_code=404, detail="Repository not found")
|
||||
|
||||
# Update credentials in the repository object
|
||||
if repo_id in git_repository_service.repositories:
|
||||
credentials_dict = request.credentials.dict()
|
||||
from ..services.git_repository_service import GitCredentials
|
||||
|
||||
git_repo = git_repository_service.repositories[repo_id]
|
||||
git_repo.credentials = GitCredentials(
|
||||
repo_url=git_repo.url,
|
||||
**credentials_dict
|
||||
)
|
||||
|
||||
await git_repository_service._save_repositories()
|
||||
|
||||
logger.info(f"✅ Credentials updated for repository: {repo_id}")
|
||||
return {
|
||||
"success": True,
|
||||
"message": "Credentials updated successfully"
|
||||
}
|
||||
else:
|
||||
raise HTTPException(status_code=404, detail="Repository not found")
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error updating credentials for repository {repo_id}: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.post("/{repo_id}/update")
|
||||
async def update_repository(repo_id: str) -> Dict[str, Any]:
|
||||
"""Pull latest changes from repository"""
|
||||
try:
|
||||
logger.info(f"🔄 Updating repository: {repo_id}")
|
||||
|
||||
result = await git_repository_service.update_repository(repo_id)
|
||||
|
||||
if result["success"]:
|
||||
logger.info(f"✅ Repository {repo_id} updated successfully")
|
||||
else:
|
||||
logger.error(f"❌ Failed to update repository {repo_id}: {result.get('error')}")
|
||||
|
||||
return {
|
||||
"success": result["success"],
|
||||
"data": result
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error updating repository {repo_id}: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.delete("/{repo_id}")
|
||||
async def remove_repository(repo_id: str) -> Dict[str, Any]:
|
||||
"""Remove a git repository"""
|
||||
try:
|
||||
logger.info(f"🗑️ Removing repository: {repo_id}")
|
||||
|
||||
result = await git_repository_service.remove_repository(repo_id)
|
||||
|
||||
if result["success"]:
|
||||
logger.info(f"✅ Repository {repo_id} removed successfully")
|
||||
else:
|
||||
logger.error(f"❌ Failed to remove repository {repo_id}: {result.get('error')}")
|
||||
|
||||
return {
|
||||
"success": result["success"],
|
||||
"data": result
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error removing repository {repo_id}: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.get("/{repo_id}/files")
|
||||
async def get_repository_files(
|
||||
repo_id: str,
|
||||
path: str = Query("", description="Directory path within repository"),
|
||||
max_depth: int = Query(2, description="Maximum directory depth to scan")
|
||||
) -> Dict[str, Any]:
|
||||
"""Get file structure of a repository"""
|
||||
try:
|
||||
logger.info(f"📁 Getting files for repository: {repo_id}, path: {path}")
|
||||
|
||||
result = await git_repository_service.get_repository_files(
|
||||
repo_id=repo_id,
|
||||
path=path,
|
||||
max_depth=max_depth
|
||||
)
|
||||
|
||||
if result["success"]:
|
||||
logger.info(f"✅ Files retrieved for repository {repo_id}")
|
||||
else:
|
||||
logger.error(f"❌ Failed to get files for repository {repo_id}: {result.get('error')}")
|
||||
|
||||
return {
|
||||
"success": result["success"],
|
||||
"data": result
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error getting files for repository {repo_id}: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.get("/{repo_id}/files/content")
|
||||
async def get_file_content(
|
||||
repo_id: str,
|
||||
file_path: str = Query(..., description="Path to file within repository"),
|
||||
max_size: int = Query(1024*1024, description="Maximum file size in bytes")
|
||||
) -> Dict[str, Any]:
|
||||
"""Get content of a specific file in the repository"""
|
||||
try:
|
||||
logger.info(f"📄 Getting file content: {repo_id}/{file_path}")
|
||||
|
||||
result = await git_repository_service.get_file_content(
|
||||
repo_id=repo_id,
|
||||
file_path=file_path,
|
||||
max_size=max_size
|
||||
)
|
||||
|
||||
if result["success"]:
|
||||
logger.info(f"✅ File content retrieved: {repo_id}/{file_path}")
|
||||
else:
|
||||
logger.error(f"❌ Failed to get file content {repo_id}/{file_path}: {result.get('error')}")
|
||||
|
||||
return {
|
||||
"success": result["success"],
|
||||
"data": result
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error getting file content {repo_id}/{file_path}: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.get("/{repo_id}/status")
|
||||
async def get_repository_status(repo_id: str) -> Dict[str, Any]:
|
||||
"""Get current status of a repository (cloning, ready, error, etc.)"""
|
||||
try:
|
||||
logger.info(f"📊 Getting status for repository: {repo_id}")
|
||||
|
||||
repository = await git_repository_service.get_repository(repo_id)
|
||||
|
||||
if not repository:
|
||||
raise HTTPException(status_code=404, detail="Repository not found")
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"data": {
|
||||
"repository_id": repo_id,
|
||||
"name": repository["name"],
|
||||
"status": repository["status"],
|
||||
"last_updated": repository.get("last_updated"),
|
||||
"commit_hash": repository.get("commit_hash"),
|
||||
"commit_message": repository.get("commit_message"),
|
||||
"error_message": repository.get("error_message")
|
||||
}
|
||||
}
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error getting status for repository {repo_id}: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
# Health check for the git repository service
|
||||
@router.get("/health/check")
|
||||
async def health_check() -> Dict[str, Any]:
|
||||
"""Health check for git repository service"""
|
||||
try:
|
||||
return {
|
||||
"success": True,
|
||||
"service": "git_repositories",
|
||||
"status": "healthy",
|
||||
"repositories_count": len(git_repository_service.repositories)
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Health check failed: {e}")
|
||||
return {
|
||||
"success": False,
|
||||
"service": "git_repositories",
|
||||
"status": "unhealthy",
|
||||
"error": str(e)
|
||||
}
|
||||
515
backend/app/api/members.py
Normal file
@@ -0,0 +1,515 @@
|
||||
"""
|
||||
Member Management API for WHOOSH - Handles project member invitations, roles, and collaboration.
|
||||
"""
|
||||
from fastapi import APIRouter, HTTPException, Depends, BackgroundTasks
|
||||
from pydantic import BaseModel, Field, EmailStr
|
||||
from typing import List, Dict, Optional, Any
|
||||
from datetime import datetime
|
||||
|
||||
from app.services.member_service import MemberService
|
||||
from app.services.project_service import ProjectService
|
||||
from app.services.age_service import AgeService
|
||||
from app.core.auth_deps import get_current_user_context
|
||||
|
||||
router = APIRouter(prefix="/api/members", tags=["member-management"])
|
||||
|
||||
# Pydantic models for request/response validation
|
||||
|
||||
class MemberInviteRequest(BaseModel):
|
||||
project_id: str = Field(..., min_length=1, max_length=100)
|
||||
member_email: EmailStr
|
||||
role: str = Field(..., pattern="^(owner|maintainer|developer|viewer)$")
|
||||
custom_message: Optional[str] = Field(None, max_length=1000)
|
||||
send_email: bool = True
|
||||
include_age_key: bool = True
|
||||
|
||||
class MemberInviteResponse(BaseModel):
|
||||
success: bool
|
||||
invitation_id: Optional[str] = None
|
||||
invitation_url: Optional[str] = None
|
||||
member_email: str
|
||||
role: str
|
||||
expires_at: Optional[str] = None
|
||||
email_sent: bool = False
|
||||
error: Optional[str] = None
|
||||
|
||||
class InvitationAcceptRequest(BaseModel):
|
||||
invitation_token: str
|
||||
accepter_name: str = Field(..., min_length=1, max_length=100)
|
||||
accepter_username: Optional[str] = Field(None, max_length=50)
|
||||
gitea_username: Optional[str] = Field(None, max_length=50)
|
||||
setup_preferences: Optional[Dict[str, Any]] = None
|
||||
|
||||
class InvitationAcceptResponse(BaseModel):
|
||||
success: bool
|
||||
member_email: str
|
||||
role: str
|
||||
project_id: str
|
||||
project_name: str
|
||||
gitea_access: Optional[Dict[str, Any]] = None
|
||||
age_access: Optional[Dict[str, Any]] = None
|
||||
permissions: List[str]
|
||||
next_steps: List[str]
|
||||
error: Optional[str] = None
|
||||
|
||||
class ProjectMemberInfo(BaseModel):
|
||||
email: str
|
||||
role: str
|
||||
status: str
|
||||
invited_at: str
|
||||
invited_by: str
|
||||
accepted_at: Optional[str] = None
|
||||
permissions: List[str]
|
||||
gitea_access: bool = False
|
||||
age_access: bool = False
|
||||
|
||||
class MemberRoleUpdateRequest(BaseModel):
|
||||
member_email: EmailStr
|
||||
new_role: str = Field(..., pattern="^(owner|maintainer|developer|viewer)$")
|
||||
reason: Optional[str] = Field(None, max_length=500)
|
||||
|
||||
class MemberRemovalRequest(BaseModel):
|
||||
member_email: EmailStr
|
||||
reason: Optional[str] = Field(None, max_length=500)
|
||||
|
||||
def get_member_service():
|
||||
"""Dependency injection for member service."""
|
||||
return MemberService()
|
||||
|
||||
def get_project_service():
|
||||
"""Dependency injection for project service."""
|
||||
return ProjectService()
|
||||
|
||||
def get_age_service():
|
||||
"""Dependency injection for Age service."""
|
||||
return AgeService()
|
||||
|
||||
@router.post("/invite", response_model=MemberInviteResponse)
|
||||
async def invite_member(
|
||||
request: MemberInviteRequest,
|
||||
background_tasks: BackgroundTasks,
|
||||
current_user: Dict[str, Any] = Depends(get_current_user_context),
|
||||
member_service: MemberService = Depends(get_member_service),
|
||||
project_service: ProjectService = Depends(get_project_service),
|
||||
age_service: AgeService = Depends(get_age_service)
|
||||
):
|
||||
"""Invite a new member to join a project."""
|
||||
try:
|
||||
# Verify project exists and user has permission to invite
|
||||
project = project_service.get_project_by_id(request.project_id)
|
||||
if not project:
|
||||
raise HTTPException(status_code=404, detail="Project not found")
|
||||
|
||||
# TODO: Check if current user has permission to invite members
|
||||
# For now, assume permission is granted
|
||||
|
||||
inviter_name = current_user.get("name", "WHOOSH User")
|
||||
project_name = project.get("name", request.project_id)
|
||||
|
||||
# Generate invitation
|
||||
invitation_result = member_service.generate_member_invitation(
|
||||
project_id=request.project_id,
|
||||
member_email=request.member_email,
|
||||
role=request.role,
|
||||
inviter_name=inviter_name,
|
||||
project_name=project_name,
|
||||
custom_message=request.custom_message
|
||||
)
|
||||
|
||||
if not invitation_result.get("created"):
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=invitation_result.get("error", "Failed to create invitation")
|
||||
)
|
||||
|
||||
# Send email invitation if requested
|
||||
email_sent = False
|
||||
if request.send_email:
|
||||
# Get Age public key if requested
|
||||
age_public_key = None
|
||||
if request.include_age_key:
|
||||
try:
|
||||
project_keys = age_service.list_project_keys(request.project_id)
|
||||
if project_keys:
|
||||
age_public_key = project_keys[0]["public_key"]
|
||||
except Exception as e:
|
||||
print(f"Warning: Could not retrieve Age key: {e}")
|
||||
|
||||
# Send email in background
|
||||
background_tasks.add_task(
|
||||
member_service.send_email_invitation,
|
||||
invitation_result,
|
||||
age_public_key
|
||||
)
|
||||
email_sent = True
|
||||
|
||||
return MemberInviteResponse(
|
||||
success=True,
|
||||
invitation_id=invitation_result["invitation_id"],
|
||||
invitation_url=invitation_result["invitation_url"],
|
||||
member_email=request.member_email,
|
||||
role=request.role,
|
||||
expires_at=invitation_result["expires_at"],
|
||||
email_sent=email_sent
|
||||
)
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to invite member: {str(e)}")
|
||||
|
||||
@router.get("/invitations/{invitation_id}")
|
||||
async def get_invitation_details(
|
||||
invitation_id: str,
|
||||
member_service: MemberService = Depends(get_member_service)
|
||||
):
|
||||
"""Get invitation details for verification and display."""
|
||||
try:
|
||||
invitation_status = member_service.get_invitation_status(invitation_id)
|
||||
if not invitation_status:
|
||||
raise HTTPException(status_code=404, detail="Invitation not found")
|
||||
|
||||
return invitation_status
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to retrieve invitation: {str(e)}")
|
||||
|
||||
@router.post("/invitations/{invitation_id}/accept", response_model=InvitationAcceptResponse)
|
||||
async def accept_invitation(
|
||||
invitation_id: str,
|
||||
request: InvitationAcceptRequest,
|
||||
member_service: MemberService = Depends(get_member_service)
|
||||
):
|
||||
"""Accept a project invitation and set up member access."""
|
||||
try:
|
||||
# Validate invitation token first
|
||||
if not member_service.validate_invitation_token(invitation_id, request.invitation_token):
|
||||
raise HTTPException(status_code=401, detail="Invalid invitation token")
|
||||
|
||||
# Prepare accepter data
|
||||
accepter_data = {
|
||||
"name": request.accepter_name,
|
||||
"username": request.accepter_username,
|
||||
"gitea_username": request.gitea_username or request.accepter_username,
|
||||
"setup_preferences": request.setup_preferences or {},
|
||||
"accepted_via": "whoosh_api"
|
||||
}
|
||||
|
||||
# Process acceptance
|
||||
result = member_service.accept_invitation(
|
||||
invitation_id=invitation_id,
|
||||
invitation_token=request.invitation_token,
|
||||
accepter_data=accepter_data
|
||||
)
|
||||
|
||||
if not result.get("success"):
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=result.get("error", "Failed to accept invitation")
|
||||
)
|
||||
|
||||
return InvitationAcceptResponse(
|
||||
success=True,
|
||||
member_email=result["member_email"],
|
||||
role=result["role"],
|
||||
project_id=result["project_id"],
|
||||
project_name=result["project_name"],
|
||||
gitea_access=result.get("gitea_access"),
|
||||
age_access=result.get("age_access"),
|
||||
permissions=result["permissions"],
|
||||
next_steps=result["next_steps"]
|
||||
)
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to accept invitation: {str(e)}")
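# Illustrative acceptance flow (editorial addition, not part of this commit): the
# invitee posts the token from their invitation email to the accept endpoint. The
# host, invitation id, and token are placeholders; the body mirrors
# InvitationAcceptRequest above.
#
#   import httpx
#
#   resp = httpx.post(
#       "http://localhost:8000/api/members/invitations/<invitation-id>/accept",
#       json={
#           "invitation_token": "<token-from-email>",
#           "accepter_name": "Jane Doe",
#           "accepter_username": "jdoe",
#           "gitea_username": "jdoe",
#       },
#   )
#   resp.raise_for_status()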
|
||||
|
||||
@router.get("/projects/{project_id}", response_model=List[ProjectMemberInfo])
|
||||
async def list_project_members(
|
||||
project_id: str,
|
||||
current_user: Dict[str, Any] = Depends(get_current_user_context),
|
||||
member_service: MemberService = Depends(get_member_service),
|
||||
project_service: ProjectService = Depends(get_project_service)
|
||||
):
|
||||
"""List all members of a project with their roles and status."""
|
||||
try:
|
||||
# Verify project exists and user has permission to view members
|
||||
project = project_service.get_project_by_id(project_id)
|
||||
if not project:
|
||||
raise HTTPException(status_code=404, detail="Project not found")
|
||||
|
||||
# TODO: Check if current user has permission to view members
|
||||
# For now, assume permission is granted
|
||||
|
||||
members = member_service.list_project_members(project_id)
|
||||
|
||||
# Convert to response format
|
||||
member_info_list = []
|
||||
for member in members:
|
||||
member_info = ProjectMemberInfo(
|
||||
email=member["email"],
|
||||
role=member["role"],
|
||||
status=member["status"],
|
||||
invited_at=member["invited_at"],
|
||||
invited_by=member["invited_by"],
|
||||
accepted_at=member.get("accepted_at"),
|
||||
permissions=member["permissions"],
|
||||
gitea_access=member["status"] == "accepted",
|
||||
age_access=member["role"] in ["owner", "maintainer", "developer"]
|
||||
)
|
||||
member_info_list.append(member_info)
|
||||
|
||||
return member_info_list
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to list members: {str(e)}")
|
||||
|
||||
@router.put("/projects/{project_id}/members/role")
|
||||
async def update_member_role(
|
||||
project_id: str,
|
||||
request: MemberRoleUpdateRequest,
|
||||
current_user: Dict[str, Any] = Depends(get_current_user_context),
|
||||
member_service: MemberService = Depends(get_member_service),
|
||||
project_service: ProjectService = Depends(get_project_service)
|
||||
):
|
||||
"""Update a member's role in the project."""
|
||||
try:
|
||||
# Verify project exists and user has permission to manage members
|
||||
project = project_service.get_project_by_id(project_id)
|
||||
if not project:
|
||||
raise HTTPException(status_code=404, detail="Project not found")
|
||||
|
||||
# TODO: Implement role updates
|
||||
# This would involve updating the member's invitation record and
|
||||
# updating their permissions in GITEA and Age access
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"message": f"Member role update functionality coming soon",
|
||||
"member_email": request.member_email,
|
||||
"new_role": request.new_role
|
||||
}
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to update member role: {str(e)}")
|
||||
|
||||
@router.delete("/projects/{project_id}/members")
|
||||
async def remove_member(
|
||||
project_id: str,
|
||||
request: MemberRemovalRequest,
|
||||
current_user: Dict[str, Any] = Depends(get_current_user_context),
|
||||
member_service: MemberService = Depends(get_member_service),
|
||||
project_service: ProjectService = Depends(get_project_service)
|
||||
):
|
||||
"""Remove a member from the project."""
|
||||
try:
|
||||
# Verify project exists and user has permission to remove members
|
||||
project = project_service.get_project_by_id(project_id)
|
||||
if not project:
|
||||
raise HTTPException(status_code=404, detail="Project not found")
|
||||
|
||||
# TODO: Check if current user has permission to remove members
|
||||
# For now, assume permission is granted
|
||||
|
||||
current_user_name = current_user.get("name", "WHOOSH User")
|
||||
|
||||
# Revoke member access
|
||||
result = member_service.revoke_member_access(
|
||||
project_id=project_id,
|
||||
member_email=request.member_email,
|
||||
revoked_by=current_user_name,
|
||||
reason=request.reason or "No reason provided"
|
||||
)
|
||||
|
||||
if not result.get("success"):
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=result.get("error", "Failed to remove member")
|
||||
)
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"message": "Member access revoked successfully",
|
||||
"member_email": request.member_email,
|
||||
"revoked_by": current_user_name
|
||||
}
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to remove member: {str(e)}")
|
||||
|
||||
@router.get("/projects/{project_id}/invitations")
|
||||
async def list_project_invitations(
|
||||
project_id: str,
|
||||
status: Optional[str] = None, # Filter by status: pending, accepted, revoked, expired
|
||||
current_user: Dict[str, Any] = Depends(get_current_user_context),
|
||||
member_service: MemberService = Depends(get_member_service),
|
||||
project_service: ProjectService = Depends(get_project_service)
|
||||
):
|
||||
"""List all invitations for a project with optional status filtering."""
|
||||
try:
|
||||
# Verify project exists and user has permission to view invitations
|
||||
project = project_service.get_project_by_id(project_id)
|
||||
if not project:
|
||||
raise HTTPException(status_code=404, detail="Project not found")
|
||||
|
||||
# Get all members (which includes invitation data)
|
||||
members = member_service.list_project_members(project_id)
|
||||
|
||||
# Filter by status if requested
|
||||
if status:
|
||||
members = [member for member in members if member["status"] == status]
|
||||
|
||||
# Add expiration status
|
||||
for member in members:
|
||||
if member["status"] == "pending":
|
||||
# Check if invitation is expired (this would need expiration date from invitation)
|
||||
member["is_expired"] = False # Placeholder
|
||||
|
||||
return {
|
||||
"project_id": project_id,
|
||||
"invitations": members,
|
||||
"count": len(members),
|
||||
"filtered_by_status": status
|
||||
}
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to list invitations: {str(e)}")
|
||||
|
||||
@router.post("/projects/{project_id}/invitations/{invitation_id}/resend")
|
||||
async def resend_invitation(
|
||||
project_id: str,
|
||||
invitation_id: str,
|
||||
background_tasks: BackgroundTasks,
|
||||
current_user: Dict[str, Any] = Depends(get_current_user_context),
|
||||
member_service: MemberService = Depends(get_member_service),
|
||||
age_service: AgeService = Depends(get_age_service)
|
||||
):
|
||||
"""Resend an invitation email to a member."""
|
||||
try:
|
||||
# Load invitation to verify it exists and is pending
|
||||
invitation_status = member_service.get_invitation_status(invitation_id)
|
||||
if not invitation_status:
|
||||
raise HTTPException(status_code=404, detail="Invitation not found")
|
||||
|
||||
if invitation_status["project_id"] != project_id:
|
||||
raise HTTPException(status_code=400, detail="Invitation does not belong to this project")
|
||||
|
||||
if invitation_status["status"] != "pending":
|
||||
raise HTTPException(status_code=400, detail="Can only resend pending invitations")
|
||||
|
||||
if invitation_status["is_expired"]:
|
||||
raise HTTPException(status_code=400, detail="Cannot resend expired invitation")
|
||||
|
||||
# Get Age public key for the project
|
||||
age_public_key = None
|
||||
try:
|
||||
project_keys = age_service.list_project_keys(project_id)
|
||||
if project_keys:
|
||||
age_public_key = project_keys[0]["public_key"]
|
||||
except Exception as e:
|
||||
print(f"Warning: Could not retrieve Age key: {e}")
|
||||
|
||||
# Resend invitation email in background
|
||||
invitation_data = {
|
||||
"invitation_id": invitation_id,
|
||||
"project_name": invitation_status["project_name"],
|
||||
"member_email": invitation_status["member_email"],
|
||||
"role": invitation_status["role"],
|
||||
"inviter_name": current_user.get("name", "WHOOSH User"),
|
||||
"invitation_url": f"/invite/{invitation_id}?token={invitation_status.get('invitation_token', '')}",
|
||||
"expires_at": invitation_status["expires_at"],
|
||||
"permissions": [] # Would need to get from stored invitation
|
||||
}
|
||||
|
||||
background_tasks.add_task(
|
||||
member_service.send_email_invitation,
|
||||
invitation_data,
|
||||
age_public_key
|
||||
)
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"message": "Invitation email resent successfully",
|
||||
"invitation_id": invitation_id,
|
||||
"member_email": invitation_status["member_email"]
|
||||
}
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to resend invitation: {str(e)}")
|
||||
|
||||
# === Member Dashboard and Profile Endpoints ===
|
||||
|
||||
@router.get("/profile")
|
||||
async def get_member_profile(
|
||||
current_user: Dict[str, Any] = Depends(get_current_user_context),
|
||||
member_service: MemberService = Depends(get_member_service)
|
||||
):
|
||||
"""Get current member's profile and project memberships."""
|
||||
try:
|
||||
# TODO: Implement member profile lookup across all projects
|
||||
# This would involve searching through all invitations/memberships
|
||||
|
||||
user_email = current_user.get("email", "")
|
||||
|
||||
return {
|
||||
"member_email": user_email,
|
||||
"name": current_user.get("name", ""),
|
||||
"projects": [], # Placeholder for projects this member belongs to
|
||||
"total_projects": 0,
|
||||
"active_invitations": 0,
|
||||
"roles": {} # Mapping of project_id to role
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to get member profile: {str(e)}")
|
||||
|
||||
@router.get("/projects/{project_id}/permissions")
|
||||
async def get_member_permissions(
|
||||
project_id: str,
|
||||
member_email: Optional[str] = None, # If not provided, use current user
|
||||
current_user: Dict[str, Any] = Depends(get_current_user_context),
|
||||
member_service: MemberService = Depends(get_member_service)
|
||||
):
|
||||
"""Get detailed permissions for a member in a specific project."""
|
||||
try:
|
||||
target_email = member_email or current_user.get("email", "")
|
||||
|
||||
# Get project members to find this member's role
|
||||
members = member_service.list_project_members(project_id)
|
||||
member_info = None
|
||||
|
||||
for member in members:
|
||||
if member["email"] == target_email:
|
||||
member_info = member
|
||||
break
|
||||
|
||||
if not member_info:
|
||||
raise HTTPException(status_code=404, detail="Member not found in project")
|
||||
|
||||
return {
|
||||
"project_id": project_id,
|
||||
"member_email": target_email,
|
||||
"role": member_info["role"],
|
||||
"status": member_info["status"],
|
||||
"permissions": member_info["permissions"],
|
||||
"can_access_gitea": member_info["status"] == "accepted",
|
||||
"can_decrypt_age": member_info["role"] in ["owner", "maintainer", "developer"]
|
||||
}
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to get member permissions: {str(e)}")
|
||||
598
backend/app/api/project_setup.py
Normal file
@@ -0,0 +1,598 @@
|
||||
"""
|
||||
Project Setup API for WHOOSH - Comprehensive project creation with GITEA integration.
|
||||
"""
|
||||
from fastapi import APIRouter, HTTPException, Depends, BackgroundTasks
|
||||
from pydantic import BaseModel, Field
|
||||
from typing import List, Dict, Optional, Any
|
||||
from datetime import datetime
|
||||
import asyncio
|
||||
|
||||
from app.services.gitea_service import GiteaService
|
||||
from app.services.project_service import ProjectService
|
||||
from app.services.age_service import AgeService
|
||||
from app.services.member_service import MemberService
|
||||
from app.models.project import Project
|
||||
|
||||
router = APIRouter(prefix="/api/project-setup", tags=["project-setup"])
|
||||
|
||||
# Pydantic models for request/response validation
|
||||
|
||||
class ProjectTemplateConfig(BaseModel):
|
||||
template_id: str
|
||||
name: str
|
||||
description: str
|
||||
icon: str
|
||||
features: List[str]
|
||||
starter_files: Dict[str, Any] = {}
|
||||
|
||||
class AgeKeyConfig(BaseModel):
|
||||
generate_new_key: bool = True
|
||||
master_key_passphrase: Optional[str] = None
|
||||
key_backup_location: Optional[str] = None
|
||||
key_recovery_questions: Optional[List[Dict[str, str]]] = None
|
||||
|
||||
class GitConfig(BaseModel):
|
||||
repo_type: str = Field(..., pattern="^(new|existing|import)$")
|
||||
repo_name: Optional[str] = None
|
||||
git_url: Optional[str] = None
|
||||
git_owner: Optional[str] = None
|
||||
git_branch: str = "main"
|
||||
auto_initialize: bool = True
|
||||
add_gitignore: bool = True
|
||||
add_readme: bool = True
|
||||
license_type: Optional[str] = "MIT"
|
||||
private: bool = False
|
||||
|
||||
class ProjectMember(BaseModel):
|
||||
email: str
|
||||
role: str = Field(..., pattern="^(owner|maintainer|developer|viewer)$")
|
||||
age_public_key: Optional[str] = None
|
||||
invite_message: Optional[str] = None
|
||||
|
||||
class MemberConfig(BaseModel):
|
||||
initial_members: List[ProjectMember] = []
|
||||
role_permissions: Dict[str, List[str]] = {
|
||||
"owner": ["all"],
|
||||
"maintainer": ["read", "write", "deploy"],
|
||||
"developer": ["read", "write"],
|
||||
"viewer": ["read"]
|
||||
}
|
||||
|
||||
class BzzzSyncPreferences(BaseModel):
|
||||
real_time: bool = True
|
||||
conflict_resolution: str = Field("manual", pattern="^(manual|automatic|priority)$")
|
||||
backup_frequency: str = Field("hourly", pattern="^(real-time|hourly|daily)$")
|
||||
|
||||
class BzzzConfig(BaseModel):
|
||||
enable_bzzz: bool = False
|
||||
network_peers: Optional[List[str]] = None
|
||||
auto_discovery: bool = True
|
||||
task_coordination: bool = True
|
||||
ai_agent_access: bool = False
|
||||
sync_preferences: BzzzSyncPreferences = BzzzSyncPreferences()
|
||||
|
||||
class AdvancedConfig(BaseModel):
|
||||
project_visibility: str = Field("private", pattern="^(private|internal|public)$")
|
||||
security_level: str = Field("standard", pattern="^(standard|high|maximum)$")
|
||||
backup_enabled: bool = True
|
||||
monitoring_enabled: bool = True
|
||||
ci_cd_enabled: bool = False
|
||||
custom_workflows: Optional[List[str]] = None
|
||||
|
||||
class ProjectSetupRequest(BaseModel):
|
||||
# Basic Information
|
||||
name: str = Field(..., min_length=1, max_length=100)
|
||||
description: Optional[str] = Field(None, max_length=500)
|
||||
tags: Optional[List[str]] = None
|
||||
template_id: Optional[str] = None
|
||||
|
||||
# Configuration sections
|
||||
age_config: AgeKeyConfig = AgeKeyConfig()
|
||||
git_config: GitConfig
|
||||
member_config: MemberConfig = MemberConfig()
|
||||
bzzz_config: BzzzConfig = BzzzConfig()
|
||||
advanced_config: AdvancedConfig = AdvancedConfig()
|
||||
|
||||
class ProjectSetupStatus(BaseModel):
|
||||
step: str
|
||||
status: str = Field(..., pattern="^(pending|in_progress|completed|failed)$")
|
||||
message: str
|
||||
details: Optional[Dict[str, Any]] = None
|
||||
|
||||
class ProjectSetupResponse(BaseModel):
|
||||
project_id: str
|
||||
status: str
|
||||
progress: List[ProjectSetupStatus]
|
||||
repository: Optional[Dict[str, Any]] = None
|
||||
age_keys: Optional[Dict[str, str]] = None
|
||||
member_invitations: Optional[List[Dict[str, str]]] = None
|
||||
next_steps: List[str]
|
||||
|
||||
# Project templates configuration
|
||||
PROJECT_TEMPLATES = {
|
||||
"full-stack": ProjectTemplateConfig(
|
||||
template_id="full-stack",
|
||||
name="Full-Stack Application",
|
||||
description="Complete web application with frontend, backend, and database",
|
||||
icon="🌐",
|
||||
features=["React/Vue", "Node.js/Python", "Database", "CI/CD"],
|
||||
starter_files={
|
||||
"frontend": {"package.json": {}, "src/index.js": ""},
|
||||
"backend": {"requirements.txt": "", "app.py": ""},
|
||||
"docker-compose.yml": {},
|
||||
".github/workflows/ci.yml": {}
|
||||
}
|
||||
),
|
||||
"ai-research": ProjectTemplateConfig(
|
||||
template_id="ai-research",
|
||||
name="AI Research Project",
|
||||
description="Machine learning and AI development workspace",
|
||||
icon="🤖",
|
||||
features=["Jupyter", "Python", "GPU Support", "Data Pipeline"],
|
||||
starter_files={
|
||||
"notebooks": {},
|
||||
"src": {},
|
||||
"data": {},
|
||||
"models": {},
|
||||
"requirements.txt": "",
|
||||
"environment.yml": {}
|
||||
}
|
||||
),
|
||||
"documentation": ProjectTemplateConfig(
|
||||
template_id="documentation",
|
||||
name="Documentation Site",
|
||||
description="Technical documentation and knowledge base",
|
||||
icon="📚",
|
||||
features=["Markdown", "Static Site", "Search", "Multi-language"],
|
||||
starter_files={
|
||||
"docs": {},
|
||||
"mkdocs.yml": {},
|
||||
".readthedocs.yml": {}
|
||||
}
|
||||
),
|
||||
"mobile-app": ProjectTemplateConfig(
|
||||
template_id="mobile-app",
|
||||
name="Mobile Application",
|
||||
description="Cross-platform mobile app development",
|
||||
icon="📱",
|
||||
features=["React Native", "Flutter", "Push Notifications", "App Store"],
|
||||
starter_files={
|
||||
"src": {},
|
||||
"assets": {},
|
||||
"package.json": {},
|
||||
"app.json": {}
|
||||
}
|
||||
),
|
||||
"data-science": ProjectTemplateConfig(
|
||||
template_id="data-science",
|
||||
name="Data Science",
|
||||
description="Data analysis and visualization project",
|
||||
icon="📊",
|
||||
features=["Python", "R", "Visualization", "Reports"],
|
||||
starter_files={
|
||||
"data": {},
|
||||
"notebooks": {},
|
||||
"src": {},
|
||||
"reports": {},
|
||||
"requirements.txt": {}
|
||||
}
|
||||
),
|
||||
"empty": ProjectTemplateConfig(
|
||||
template_id="empty",
|
||||
name="Empty Project",
|
||||
description="Start from scratch with minimal setup",
|
||||
icon="📁",
|
||||
features=["Git", "Basic Structure", "README"],
|
||||
starter_files={
|
||||
"README.md": "",
|
||||
".gitignore": ""
|
||||
}
|
||||
)
|
||||
}
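# Sketch (editorial addition, not part of this commit): registering an additional
# template follows the same pattern as the entries above. The "iot-edge" id and its
# fields are invented for illustration only.
#
#   PROJECT_TEMPLATES["iot-edge"] = ProjectTemplateConfig(
#       template_id="iot-edge",
#       name="IoT Edge Service",
#       description="Edge data-collection service with MQTT ingestion",
#       icon="📡",
#       features=["MQTT", "Python", "Telemetry"],
#       starter_files={"src": {}, "requirements.txt": "", "README.md": ""},
#   )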
|
||||
|
||||
def get_gitea_service():
|
||||
"""Dependency injection for GITEA service."""
|
||||
return GiteaService()
|
||||
|
||||
def get_project_service():
|
||||
"""Dependency injection for project service."""
|
||||
return ProjectService()
|
||||
|
||||
def get_age_service():
|
||||
"""Dependency injection for Age service."""
|
||||
return AgeService()
|
||||
|
||||
def get_member_service():
|
||||
"""Dependency injection for Member service."""
|
||||
return MemberService()
|
||||
|
||||
@router.get("/templates")
|
||||
async def get_project_templates() -> Dict[str, Any]:
|
||||
"""Get available project templates."""
|
||||
return {
|
||||
"templates": list(PROJECT_TEMPLATES.values()),
|
||||
"count": len(PROJECT_TEMPLATES)
|
||||
}
|
||||
|
||||
@router.get("/templates/{template_id}")
|
||||
async def get_project_template(template_id: str) -> ProjectTemplateConfig:
|
||||
"""Get specific project template details."""
|
||||
if template_id not in PROJECT_TEMPLATES:
|
||||
raise HTTPException(status_code=404, detail="Template not found")
|
||||
|
||||
return PROJECT_TEMPLATES[template_id]
|
||||
|
||||
@router.post("/validate-repository")
|
||||
async def validate_repository(
|
||||
owner: str,
|
||||
repo_name: str,
|
||||
gitea_service: GiteaService = Depends(get_gitea_service)
|
||||
) -> Dict[str, Any]:
|
||||
"""Validate repository access and BZZZ readiness."""
|
||||
return gitea_service.validate_repository_access(owner, repo_name)
|
||||
|
||||
@router.post("/create")
|
||||
async def create_project(
|
||||
request: ProjectSetupRequest,
|
||||
background_tasks: BackgroundTasks,
|
||||
gitea_service: GiteaService = Depends(get_gitea_service),
|
||||
project_service: ProjectService = Depends(get_project_service),
|
||||
age_service: AgeService = Depends(get_age_service),
|
||||
member_service: MemberService = Depends(get_member_service)
|
||||
) -> ProjectSetupResponse:
|
||||
"""Create a new project with comprehensive setup."""
|
||||
|
||||
project_id = request.name.lower().replace(" ", "-").replace("_", "-")
|
||||
|
||||
# Initialize setup progress tracking
|
||||
progress = [
|
||||
ProjectSetupStatus(step="validation", status="pending", message="Validating project configuration"),
|
||||
ProjectSetupStatus(step="age_keys", status="pending", message="Setting up Age master keys"),
|
||||
ProjectSetupStatus(step="git_repository", status="pending", message="Creating Git repository"),
|
||||
ProjectSetupStatus(step="bzzz_setup", status="pending", message="Configuring BZZZ integration"),
|
||||
ProjectSetupStatus(step="member_invites", status="pending", message="Sending member invitations"),
|
||||
ProjectSetupStatus(step="finalization", status="pending", message="Finalizing project setup")
|
||||
]
|
||||
|
||||
try:
|
||||
# Step 1: Validation
|
||||
progress[0].status = "in_progress"
|
||||
progress[0].message = "Validating project name and configuration"
|
||||
|
||||
# Check if project name is available
|
||||
existing_project = project_service.get_project_by_id(project_id)
|
||||
if existing_project:
|
||||
progress[0].status = "failed"
|
||||
progress[0].message = f"Project '{project_id}' already exists"
|
||||
raise HTTPException(status_code=409, detail="Project name already exists")
|
||||
|
||||
progress[0].status = "completed"
|
||||
progress[0].message = "Validation completed"
|
||||
|
||||
# Step 2: Age Keys Setup
|
||||
progress[1].status = "in_progress"
|
||||
age_keys = None
|
||||
|
||||
if request.age_config.generate_new_key:
|
||||
progress[1].message = "Generating Age master key pair"
|
||||
age_keys = await generate_age_keys(project_id, request.age_config, age_service)
|
||||
|
||||
if age_keys:
|
||||
progress[1].status = "completed"
|
||||
progress[1].message = f"Age master keys generated (Key ID: {age_keys['key_id']})"
|
||||
progress[1].details = {
|
||||
"key_id": age_keys["key_id"],
|
||||
"public_key": age_keys["public_key"],
|
||||
"encrypted": age_keys["encrypted"],
|
||||
"backup_created": age_keys.get("backup_created", False)
|
||||
}
|
||||
else:
|
||||
progress[1].status = "failed"
|
||||
progress[1].message = "Age key generation failed"
|
||||
raise HTTPException(status_code=500, detail="Age key generation failed")
|
||||
else:
|
||||
progress[1].status = "completed"
|
||||
progress[1].message = "Skipped Age key generation"
|
||||
|
||||
# Step 3: Git Repository Setup
|
||||
progress[2].status = "in_progress"
|
||||
repository_info = None
|
||||
|
||||
if request.git_config.repo_type == "new":
|
||||
progress[2].message = "Creating new Git repository"
|
||||
|
||||
# Prepare repository data
|
||||
repo_data = {
|
||||
"name": request.git_config.repo_name or project_id,
|
||||
"description": request.description or f"WHOOSH project: {request.name}",
|
||||
"owner": request.git_config.git_owner or "whoosh",
|
||||
"private": request.git_config.private
|
||||
}
|
||||
|
||||
repository_info = gitea_service.setup_project_repository(repo_data)
|
||||
|
||||
if repository_info:
|
||||
progress[2].status = "completed"
|
||||
progress[2].message = f"Repository created: {repository_info['gitea_url']}"
|
||||
progress[2].details = repository_info
|
||||
else:
|
||||
progress[2].status = "failed"
|
||||
progress[2].message = "Failed to create Git repository"
|
||||
raise HTTPException(status_code=500, detail="Repository creation failed")
|
||||
|
||||
elif request.git_config.repo_type == "existing":
|
||||
progress[2].message = "Validating existing repository"
|
||||
|
||||
validation = gitea_service.validate_repository_access(
|
||||
request.git_config.git_owner,
|
||||
request.git_config.repo_name
|
||||
)
|
||||
|
||||
if validation["accessible"]:
|
||||
repository_info = {
|
||||
"repository": validation["repository"],
|
||||
"gitea_url": f"{gitea_service.gitea_base_url}/{request.git_config.git_owner}/{request.git_config.repo_name}",
|
||||
"bzzz_enabled": validation["bzzz_ready"]
|
||||
}
|
||||
progress[2].status = "completed"
|
||||
progress[2].message = "Existing repository validated"
|
||||
else:
|
||||
progress[2].status = "failed"
|
||||
progress[2].message = f"Repository validation failed: {validation.get('error', 'Unknown error')}"
|
||||
raise HTTPException(status_code=400, detail="Repository validation failed")
|
||||
|
||||
# Step 4: BZZZ Setup
|
||||
progress[3].status = "in_progress"
|
||||
|
||||
if request.bzzz_config.enable_bzzz:
|
||||
progress[3].message = "Configuring BZZZ task coordination"
|
||||
|
||||
# Ensure BZZZ labels are set up
|
||||
if repository_info and request.git_config.repo_type == "new":
|
||||
# Labels already set up during repository creation
|
||||
pass
|
||||
elif repository_info:
|
||||
# Set up labels for existing repository
|
||||
gitea_service._setup_bzzz_labels(
|
||||
request.git_config.git_owner,
|
||||
request.git_config.repo_name
|
||||
)
|
||||
|
||||
progress[3].status = "completed"
|
||||
progress[3].message = "BZZZ integration configured"
|
||||
else:
|
||||
progress[3].status = "completed"
|
||||
progress[3].message = "BZZZ integration disabled"
|
||||
|
||||
# Step 5: Member Invitations
|
||||
progress[4].status = "in_progress"
|
||||
member_invitations = []
|
||||
|
||||
if request.member_config.initial_members:
|
||||
progress[4].message = f"Sending invitations to {len(request.member_config.initial_members)} members"
|
||||
|
||||
# Get Age public key for invitations
|
||||
age_public_key = None
|
||||
if age_keys:
|
||||
age_public_key = age_keys.get("public_key")
|
||||
|
||||
for member in request.member_config.initial_members:
|
||||
invitation = await send_member_invitation(
|
||||
project_id, member, repository_info, member_service,
|
||||
request.name, age_public_key
|
||||
)
|
||||
member_invitations.append(invitation)
|
||||
|
||||
progress[4].status = "completed"
|
||||
progress[4].message = f"Sent {len(member_invitations)} member invitations"
|
||||
else:
|
||||
progress[4].status = "completed"
|
||||
progress[4].message = "No member invitations to send"
|
||||
|
||||
# Step 6: Finalization
|
||||
progress[5].status = "in_progress"
|
||||
progress[5].message = "Creating project record"
|
||||
|
||||
# Create project in database
|
||||
project_data = {
|
||||
"name": request.name,
|
||||
"description": request.description,
|
||||
"tags": request.tags,
|
||||
"git_url": repository_info.get("gitea_url") if repository_info else None,
|
||||
"git_owner": request.git_config.git_owner,
|
||||
"git_repository": request.git_config.repo_name or project_id,
|
||||
"git_branch": request.git_config.git_branch,
|
||||
"bzzz_enabled": request.bzzz_config.enable_bzzz,
|
||||
"private_repo": request.git_config.private,
|
||||
"metadata": {
|
||||
"template_id": request.template_id,
|
||||
"security_level": request.advanced_config.security_level,
|
||||
"created_via": "whoosh_setup_wizard",
|
||||
"age_keys_enabled": request.age_config.generate_new_key,
|
||||
"member_count": len(request.member_config.initial_members)
|
||||
}
|
||||
}
|
||||
|
||||
created_project = project_service.create_project(project_data)
|
||||
|
||||
progress[5].status = "completed"
|
||||
progress[5].message = "Project setup completed successfully"
|
||||
|
||||
# Generate next steps
|
||||
next_steps = []
|
||||
if repository_info:
|
||||
next_steps.append(f"Clone repository: git clone {repository_info['repository']['clone_url']}")
|
||||
if request.bzzz_config.enable_bzzz:
|
||||
next_steps.append("Create BZZZ tasks by adding issues with 'bzzz-task' label")
|
||||
if member_invitations:
|
||||
next_steps.append("Follow up on member invitation responses")
|
||||
next_steps.append("Configure project settings and workflows")
|
||||
|
||||
return ProjectSetupResponse(
|
||||
project_id=project_id,
|
||||
status="completed",
|
||||
progress=progress,
|
||||
repository=repository_info,
|
||||
age_keys=age_keys,
|
||||
member_invitations=member_invitations,
|
||||
next_steps=next_steps
|
||||
)
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
# Update progress with error
|
||||
for step in progress:
|
||||
if step.status == "in_progress":
|
||||
step.status = "failed"
|
||||
step.message = f"Error: {str(e)}"
|
||||
break
|
||||
|
||||
raise HTTPException(status_code=500, detail=f"Project setup failed: {str(e)}")
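# Illustrative request body (editorial addition, not part of this commit) for
# POST /api/project-setup/create: a new private repository with BZZZ enabled and one
# initial member. Values are placeholders; omitted sections fall back to the model
# defaults declared above.
#
#   setup_payload = {
#       "name": "Demo Project",
#       "description": "Example project created via the setup wizard",
#       "template_id": "full-stack",
#       "git_config": {"repo_type": "new", "repo_name": "demo-project", "private": True},
#       "bzzz_config": {"enable_bzzz": True},
#       "member_config": {
#           "initial_members": [{"email": "dev@example.com", "role": "developer"}],
#       },
#   }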
|
||||
|
||||
async def generate_age_keys(project_id: str, age_config: AgeKeyConfig, age_service: AgeService) -> Optional[Dict[str, str]]:
|
||||
"""Generate Age master key pair using the Age service."""
|
||||
try:
|
||||
result = age_service.generate_master_key_pair(
|
||||
project_id=project_id,
|
||||
passphrase=age_config.master_key_passphrase
|
||||
)
|
||||
|
||||
# Create backup if location specified
|
||||
if age_config.key_backup_location:
|
||||
backup_success = age_service.backup_key(
|
||||
project_id=project_id,
|
||||
key_id=result["key_id"],
|
||||
backup_location=age_config.key_backup_location
|
||||
)
|
||||
result["backup_created"] = backup_success
|
||||
|
||||
# Generate recovery phrase
|
||||
recovery_phrase = age_service.generate_recovery_phrase(
|
||||
project_id=project_id,
|
||||
key_id=result["key_id"]
|
||||
)
|
||||
result["recovery_phrase"] = recovery_phrase
|
||||
|
||||
return {
|
||||
"key_id": result["key_id"],
|
||||
"public_key": result["public_key"],
|
||||
"private_key_stored": result["private_key_stored"],
|
||||
"backup_location": result["backup_location"],
|
||||
"recovery_phrase": recovery_phrase,
|
||||
"encrypted": result["encrypted"]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
print(f"Age key generation failed: {e}")
|
||||
return None
|
||||
|
||||
async def send_member_invitation(project_id: str, member: ProjectMember, repository_info: Optional[Dict],
|
||||
member_service: MemberService, project_name: str, age_public_key: Optional[str] = None) -> Dict[str, str]:
|
||||
"""Send invitation to project member using the member service."""
|
||||
try:
|
||||
# Generate invitation
|
||||
invitation_result = member_service.generate_member_invitation(
|
||||
project_id=project_id,
|
||||
member_email=member.email,
|
||||
role=member.role,
|
||||
inviter_name="WHOOSH Project Setup",
|
||||
project_name=project_name,
|
||||
custom_message=member.invite_message
|
||||
)
|
||||
|
||||
if not invitation_result.get("created"):
|
||||
return {
|
||||
"email": member.email,
|
||||
"role": member.role,
|
||||
"invitation_sent": False,
|
||||
"error": invitation_result.get("error", "Failed to create invitation")
|
||||
}
|
||||
|
||||
# Send email invitation
|
||||
email_sent = member_service.send_email_invitation(invitation_result, age_public_key)
|
||||
|
||||
return {
|
||||
"email": member.email,
|
||||
"role": member.role,
|
||||
"invitation_sent": email_sent,
|
||||
"invitation_id": invitation_result["invitation_id"],
|
||||
"invitation_url": invitation_result["invitation_url"],
|
||||
"expires_at": invitation_result["expires_at"]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
return {
|
||||
"email": member.email,
|
||||
"role": member.role,
|
||||
"invitation_sent": False,
|
||||
"error": str(e)
|
||||
}
|
||||
|
||||
# === Age Key Management Endpoints ===
|
||||
|
||||
@router.get("/age-keys/{project_id}")
|
||||
async def get_project_age_keys(
|
||||
project_id: str,
|
||||
age_service: AgeService = Depends(get_age_service)
|
||||
) -> Dict[str, Any]:
|
||||
"""Get Age keys for a project."""
|
||||
try:
|
||||
keys = age_service.list_project_keys(project_id)
|
||||
return {
|
||||
"project_id": project_id,
|
||||
"keys": keys,
|
||||
"count": len(keys)
|
||||
}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to retrieve Age keys: {str(e)}")
|
||||
|
||||
@router.post("/age-keys/{project_id}/validate")
|
||||
async def validate_age_key_access(
|
||||
project_id: str,
|
||||
key_id: str,
|
||||
age_service: AgeService = Depends(get_age_service)
|
||||
) -> Dict[str, Any]:
|
||||
"""Validate access to an Age key."""
|
||||
try:
|
||||
validation = age_service.validate_key_access(project_id, key_id)
|
||||
return validation
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Key validation failed: {str(e)}")
|
||||
|
||||
@router.post("/age-keys/{project_id}/backup")
|
||||
async def backup_age_key(
|
||||
project_id: str,
|
||||
key_id: str,
|
||||
backup_location: str,
|
||||
age_service: AgeService = Depends(get_age_service)
|
||||
) -> Dict[str, Any]:
|
||||
"""Create a backup of an Age key."""
|
||||
try:
|
||||
success = age_service.backup_key(project_id, key_id, backup_location)
|
||||
return {
|
||||
"project_id": project_id,
|
||||
"key_id": key_id,
|
||||
"backup_location": backup_location,
|
||||
"success": success
|
||||
}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Key backup failed: {str(e)}")
|
||||
|
||||
@router.post("/age-keys/{project_id}/encrypt")
|
||||
async def encrypt_data_with_age(
|
||||
project_id: str,
|
||||
data: str,
|
||||
recipients: List[str],
|
||||
age_service: AgeService = Depends(get_age_service)
|
||||
) -> Dict[str, Any]:
|
||||
"""Encrypt data using Age with specified recipients."""
|
||||
try:
|
||||
encrypted_data = age_service.encrypt_data(data, recipients)
|
||||
return {
|
||||
"project_id": project_id,
|
||||
"encrypted_data": encrypted_data,
|
||||
"recipients": recipients
|
||||
}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Data encryption failed: {str(e)}")
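# Sketch (editorial addition, not part of this commit): an alternative encrypt route
# that accepts the plaintext and recipients as a single JSON body via a Pydantic
# model. The model name and route suffix are invented for illustration.
#
#   class AgeEncryptRequest(BaseModel):
#       data: str
#       recipients: List[str]
#
#   @router.post("/age-keys/{project_id}/encrypt-json")
#   async def encrypt_data_with_age_json(
#       project_id: str,
#       request: AgeEncryptRequest,
#       age_service: AgeService = Depends(get_age_service)
#   ) -> Dict[str, Any]:
#       encrypted = age_service.encrypt_data(request.data, request.recipients)
#       return {"project_id": project_id, "encrypted_data": encrypted, "recipients": request.recipients}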
|
||||
@@ -99,7 +99,7 @@ async def get_bzzz_project_tasks(project_id: str) -> List[Dict[str, Any]]:
|
||||
|
||||
@bzzz_router.post("/projects/{project_id}/claim")
|
||||
async def claim_bzzz_task(project_id: str, task_data: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Register task claim with Hive system."""
|
||||
"""Register task claim with WHOOSH system."""
|
||||
try:
|
||||
task_number = task_data.get("task_number")
|
||||
agent_id = task_data.get("agent_id")
|
||||
@@ -114,7 +114,7 @@ async def claim_bzzz_task(project_id: str, task_data: Dict[str, Any]) -> Dict[st
|
||||
|
||||
@bzzz_router.put("/projects/{project_id}/status")
|
||||
async def update_bzzz_task_status(project_id: str, status_data: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Update task status in Hive system."""
|
||||
"""Update task status in WHOOSH system."""
|
||||
try:
|
||||
task_number = status_data.get("task_number")
|
||||
status = status_data.get("status")
|
||||
|
||||
@@ -152,7 +152,7 @@ async def update_repository_config(
|
||||
if "ready_to_claim" in config_data:
|
||||
project.ready_to_claim = config_data["ready_to_claim"]
|
||||
|
||||
if "status" in config_data and config_data["status"] in ["active", "inactive", "archived"]:
|
||||
if "status" in config_data and config_data["status"] in ["active", "inactive", "arcwhooshd"]:
|
||||
project.status = config_data["status"]
|
||||
|
||||
db.commit()
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
"""
|
||||
Hive API - Task Management Endpoints
|
||||
WHOOSH API - Task Management Endpoints
|
||||
|
||||
This module provides comprehensive API endpoints for managing development tasks
|
||||
in the Hive distributed orchestration platform. It handles task creation,
|
||||
in the WHOOSH distributed orchestration platform. It handles task creation,
|
||||
execution tracking, and lifecycle management across multiple agents.
|
||||
|
||||
Key Features:
|
||||
@@ -35,7 +35,7 @@ from ..core.error_handlers import (
|
||||
task_not_found_error,
|
||||
coordinator_unavailable_error,
|
||||
validation_error,
|
||||
HiveAPIException
|
||||
WHOOSHAPIException
|
||||
)
|
||||
|
||||
router = APIRouter()
|
||||
@@ -52,7 +52,7 @@ def get_coordinator() -> UnifiedCoordinator:
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
summary="Create a new development task",
|
||||
description="""
|
||||
Create and submit a new development task to the Hive cluster for execution.
|
||||
Create and submit a new development task to the WHOOSH cluster for execution.
|
||||
|
||||
This endpoint allows you to submit various types of development tasks that will be
|
||||
automatically assigned to the most suitable agent based on specialization and availability.
|
||||
@@ -506,7 +506,7 @@ async def cancel_task(
|
||||
# Check if task can be cancelled
|
||||
current_status = task.get("status")
|
||||
if current_status in ["completed", "failed", "cancelled"]:
|
||||
raise HiveAPIException(
|
||||
raise WHOOSHAPIException(
|
||||
status_code=status.HTTP_409_CONFLICT,
|
||||
detail=f"Task '{task_id}' cannot be cancelled (status: {current_status})",
|
||||
error_code="TASK_CANNOT_BE_CANCELLED",
|
||||
|
||||
504
backend/app/api/templates.py
Normal file
@@ -0,0 +1,504 @@
|
||||
"""
|
||||
Project Template API for WHOOSH - Advanced project template management.
|
||||
"""
|
||||
from fastapi import APIRouter, HTTPException, Depends, BackgroundTasks
|
||||
from pydantic import BaseModel, Field
|
||||
from typing import List, Dict, Optional, Any
|
||||
from datetime import datetime
|
||||
import tempfile
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
from app.services.template_service import ProjectTemplateService
|
||||
from app.services.gitea_service import GiteaService
|
||||
from app.core.auth_deps import get_current_user_context
|
||||
|
||||
router = APIRouter(prefix="/api/templates", tags=["project-templates"])
|
||||
|
||||
# Pydantic models for request/response validation
|
||||
|
||||
class TemplateInfo(BaseModel):
|
||||
template_id: str
|
||||
name: str
|
||||
description: str
|
||||
icon: str
|
||||
category: str
|
||||
tags: List[str]
|
||||
difficulty: str
|
||||
estimated_setup_time: str
|
||||
features: List[str]
|
||||
tech_stack: Dict[str, List[str]]
|
||||
requirements: Optional[Dict[str, str]] = None
|
||||
|
||||
class TemplateListResponse(BaseModel):
|
||||
templates: List[TemplateInfo]
|
||||
categories: List[str]
|
||||
total_count: int
|
||||
|
||||
class TemplateDetailResponse(BaseModel):
|
||||
metadata: TemplateInfo
|
||||
starter_files: Dict[str, str]
|
||||
file_structure: List[str]
|
||||
|
||||
class ProjectFromTemplateRequest(BaseModel):
|
||||
template_id: str
|
||||
project_name: str = Field(..., min_length=1, max_length=100)
|
||||
project_description: Optional[str] = Field(None, max_length=500)
|
||||
author_name: Optional[str] = Field(None, max_length=100)
|
||||
custom_variables: Optional[Dict[str, str]] = None
|
||||
create_repository: bool = True
|
||||
repository_private: bool = False
|
||||
|
||||
class ProjectFromTemplateResponse(BaseModel):
|
||||
success: bool
|
||||
project_id: str
|
||||
template_id: str
|
||||
files_created: List[str]
|
||||
repository_url: Optional[str] = None
|
||||
next_steps: List[str]
|
||||
setup_time: str
|
||||
error: Optional[str] = None
|
||||
|
||||
class TemplateValidationRequest(BaseModel):
|
||||
template_id: str
|
||||
project_variables: Dict[str, str]
|
||||
|
||||
class TemplateValidationResponse(BaseModel):
|
||||
valid: bool
|
||||
missing_requirements: List[str]
|
||||
warnings: List[str]
|
||||
estimated_size: str
|
||||
|
||||
def get_template_service():
|
||||
"""Dependency injection for template service."""
|
||||
return ProjectTemplateService()
|
||||
|
||||
def get_gitea_service():
|
||||
"""Dependency injection for GITEA service."""
|
||||
return GiteaService()
|
||||
|
||||
@router.get("/", response_model=TemplateListResponse)
|
||||
async def list_templates(
|
||||
category: Optional[str] = None,
|
||||
tag: Optional[str] = None,
|
||||
difficulty: Optional[str] = None,
|
||||
template_service: ProjectTemplateService = Depends(get_template_service)
|
||||
):
|
||||
"""List all available project templates with optional filtering."""
|
||||
try:
|
||||
templates = template_service.list_templates()
|
||||
|
||||
# Apply filters
|
||||
if category:
|
||||
templates = [t for t in templates if t.get("category") == category]
|
||||
|
||||
if tag:
|
||||
templates = [t for t in templates if tag in t.get("tags", [])]
|
||||
|
||||
if difficulty:
|
||||
templates = [t for t in templates if t.get("difficulty") == difficulty]
|
||||
|
||||
# Extract unique categories for filter options
|
||||
all_templates = template_service.list_templates()
|
||||
categories = list(set(t.get("category", "other") for t in all_templates))
|
||||
|
||||
# Convert to response format
|
||||
template_infos = []
|
||||
for template in templates:
|
||||
template_info = TemplateInfo(
|
||||
template_id=template["template_id"],
|
||||
name=template["name"],
|
||||
description=template["description"],
|
||||
icon=template["icon"],
|
||||
category=template.get("category", "other"),
|
||||
tags=template.get("tags", []),
|
||||
difficulty=template.get("difficulty", "beginner"),
|
||||
estimated_setup_time=template.get("estimated_setup_time", "5-10 minutes"),
|
||||
features=template.get("features", []),
|
||||
tech_stack=template.get("tech_stack", {}),
|
||||
requirements=template.get("requirements")
|
||||
)
|
||||
template_infos.append(template_info)
|
||||
|
||||
return TemplateListResponse(
|
||||
templates=template_infos,
|
||||
categories=sorted(categories),
|
||||
total_count=len(template_infos)
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to list templates: {str(e)}")
|
||||
|
||||
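A small usage sketch for the listing endpoint above. Host, port, and the filter values are illustrative; the query parameters map directly to list_templates' optional arguments:

    import requests

    resp = requests.get(
        "http://localhost:8000/api/templates/",
        params={"category": "web", "difficulty": "beginner"},  # illustrative filter values
    )
    resp.raise_for_status()
    data = resp.json()
    print(data["total_count"], data["categories"])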
@router.get("/{template_id}", response_model=TemplateDetailResponse)
|
||||
async def get_template_details(
|
||||
template_id: str,
|
||||
template_service: ProjectTemplateService = Depends(get_template_service)
|
||||
):
|
||||
"""Get detailed information about a specific template including files."""
|
||||
try:
|
||||
template = template_service.get_template(template_id)
|
||||
if not template:
|
||||
raise HTTPException(status_code=404, detail=f"Template '{template_id}' not found")
|
||||
|
||||
metadata = template["metadata"]
|
||||
starter_files = template["starter_files"]
|
||||
|
||||
# Create file structure list
|
||||
file_structure = sorted(starter_files.keys())
|
||||
|
||||
template_info = TemplateInfo(
|
||||
template_id=metadata["template_id"],
|
||||
name=metadata["name"],
|
||||
description=metadata["description"],
|
||||
icon=metadata["icon"],
|
||||
category=metadata.get("category", "other"),
|
||||
tags=metadata.get("tags", []),
|
||||
difficulty=metadata.get("difficulty", "beginner"),
|
||||
estimated_setup_time=metadata.get("estimated_setup_time", "5-10 minutes"),
|
||||
features=metadata.get("features", []),
|
||||
tech_stack=metadata.get("tech_stack", {}),
|
||||
requirements=metadata.get("requirements")
|
||||
)
|
||||
|
||||
return TemplateDetailResponse(
|
||||
metadata=template_info,
|
||||
starter_files=starter_files,
|
||||
file_structure=file_structure
|
||||
)
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to get template details: {str(e)}")
|
||||
|
||||
@router.post("/validate", response_model=TemplateValidationResponse)
|
||||
async def validate_template_setup(
|
||||
request: TemplateValidationRequest,
|
||||
template_service: ProjectTemplateService = Depends(get_template_service)
|
||||
):
|
||||
"""Validate template requirements and project variables before creation."""
|
||||
try:
|
||||
template = template_service.get_template(request.template_id)
|
||||
if not template:
|
||||
raise HTTPException(status_code=404, detail=f"Template '{request.template_id}' not found")
|
||||
|
||||
metadata = template["metadata"]
|
||||
requirements = metadata.get("requirements", {})
|
||||
|
||||
# Check for missing requirements
|
||||
missing_requirements = []
|
||||
for req_name, req_version in requirements.items():
|
||||
# This would check system requirements in a real implementation
|
||||
# For now, we'll simulate the check
|
||||
if req_name in ["docker", "nodejs", "python"]:
|
||||
# Assume these are available
|
||||
pass
|
||||
else:
|
||||
missing_requirements.append(f"{req_name} {req_version}")
|
||||
|
||||
# Generate warnings
|
||||
warnings = []
|
||||
if metadata.get("difficulty") == "advanced":
|
||||
warnings.append("This is an advanced template requiring significant setup time")
|
||||
|
||||
if len(template["starter_files"]) > 50:
|
||||
warnings.append("This template creates many files and may take longer to set up")
|
||||
|
||||
# Estimate project size
|
||||
total_files = len(template["starter_files"])
|
||||
if total_files < 10:
|
||||
estimated_size = "Small (< 10 files)"
|
||||
elif total_files < 30:
|
||||
estimated_size = "Medium (10-30 files)"
|
||||
else:
|
||||
estimated_size = "Large (30+ files)"
|
||||
|
||||
return TemplateValidationResponse(
|
||||
valid=len(missing_requirements) == 0,
|
||||
missing_requirements=missing_requirements,
|
||||
warnings=warnings,
|
||||
estimated_size=estimated_size
|
||||
)
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Template validation failed: {str(e)}")
|
||||
|
||||
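A hedged sketch of the validation call above. The template id and variables are illustrative; the response fields come from TemplateValidationResponse:

    import requests

    resp = requests.post(
        "http://localhost:8000/api/templates/validate",
        json={
            "template_id": "fastapi-service",              # illustrative template id
            "project_variables": {"project_name": "demo"},
        },
    )
    report = resp.json()
    print(report["valid"], report["missing_requirements"], report["estimated_size"])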
@router.post("/create-project", response_model=ProjectFromTemplateResponse)
|
||||
async def create_project_from_template(
|
||||
request: ProjectFromTemplateRequest,
|
||||
background_tasks: BackgroundTasks,
|
||||
current_user: Dict[str, Any] = Depends(get_current_user_context),
|
||||
template_service: ProjectTemplateService = Depends(get_template_service),
|
||||
gitea_service: GiteaService = Depends(get_gitea_service)
|
||||
):
|
||||
"""Create a new project from a template with optional GITEA repository creation."""
|
||||
start_time = datetime.now()
|
||||
|
||||
try:
|
||||
# Validate template exists
|
||||
template = template_service.get_template(request.template_id)
|
||||
if not template:
|
||||
raise HTTPException(status_code=404, detail=f"Template '{request.template_id}' not found")
|
||||
|
||||
# Prepare project variables
|
||||
project_variables = {
|
||||
"project_name": request.project_name,
|
||||
"project_description": request.project_description or "",
|
||||
"author_name": request.author_name or current_user.get("name", "WHOOSH User"),
|
||||
**(request.custom_variables or {})
|
||||
}
|
||||
|
||||
# Create temporary directory for project files
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
# Generate project from template
|
||||
result = template_service.create_project_from_template(
|
||||
request.template_id,
|
||||
project_variables,
|
||||
temp_dir
|
||||
)
|
||||
|
||||
repository_url = None
|
||||
|
||||
# Create GITEA repository if requested
|
||||
if request.create_repository:
|
||||
try:
|
||||
repo_name = request.project_name.lower().replace(" ", "-").replace("_", "-")
|
||||
repo_info = gitea_service.create_repository(
|
||||
owner="whoosh", # Default organization
|
||||
repo_name=repo_name,
|
||||
description=request.project_description or f"Project created from {template['metadata']['name']} template",
|
||||
private=request.repository_private,
|
||||
auto_init=True
|
||||
)
|
||||
|
||||
if repo_info:
|
||||
repository_url = repo_info.get("html_url")
|
||||
|
||||
# TODO: Upload generated files to repository
|
||||
# This would require git operations to push the template files
|
||||
# to the newly created repository
|
||||
|
||||
else:
|
||||
# Repository creation failed, but continue with project creation
|
||||
pass
|
||||
|
||||
except Exception as e:
|
||||
print(f"Warning: Repository creation failed: {e}")
|
||||
# Continue without repository
|
||||
|
||||
# Calculate setup time
|
||||
setup_time = str(datetime.now() - start_time)
|
||||
|
||||
# Generate project ID
|
||||
project_id = f"proj_{request.project_name.lower().replace(' ', '_')}_{int(start_time.timestamp())}"
|
||||
|
||||
# Get next steps from template
|
||||
next_steps = template["metadata"].get("next_steps", [
|
||||
"Review the generated project structure",
|
||||
"Install dependencies as specified in requirements files",
|
||||
"Configure environment variables",
|
||||
"Run initial setup scripts",
|
||||
"Start development server"
|
||||
])
|
||||
|
||||
# Add repository-specific next steps
|
||||
if repository_url:
|
||||
next_steps.insert(0, f"Clone your repository: git clone {repository_url}")
|
||||
next_steps.append("Commit and push your initial changes")
|
||||
|
||||
return ProjectFromTemplateResponse(
|
||||
success=True,
|
||||
project_id=project_id,
|
||||
template_id=request.template_id,
|
||||
files_created=result["files_created"],
|
||||
repository_url=repository_url,
|
||||
next_steps=next_steps,
|
||||
setup_time=setup_time
|
||||
)
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
setup_time = str(datetime.now() - start_time)
|
||||
return ProjectFromTemplateResponse(
|
||||
success=False,
|
||||
project_id="",
|
||||
template_id=request.template_id,
|
||||
files_created=[],
|
||||
repository_url=None,
|
||||
next_steps=[],
|
||||
setup_time=setup_time,
|
||||
error=str(e)
|
||||
)
|
||||
|
||||
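A minimal sketch of creating a project from a template. The field names come from ProjectFromTemplateRequest above; the template id, host, and auth header are assumptions, since the endpoint depends on get_current_user_context:

    import requests

    resp = requests.post(
        "http://localhost:8000/api/templates/create-project",
        headers={"Authorization": "Bearer <token>"},   # whatever auth your deployment uses
        json={
            "template_id": "fastapi-service",          # illustrative template id
            "project_name": "Demo Service",
            "project_description": "Created from a WHOOSH template",
            "create_repository": True,
            "repository_private": False,
        },
    )
    result = resp.json()
    print(result["success"], result["repository_url"], result["next_steps"][:3])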
@router.get("/categories", response_model=List[str])
|
||||
async def get_template_categories(
|
||||
template_service: ProjectTemplateService = Depends(get_template_service)
|
||||
):
|
||||
"""Get all available template categories."""
|
||||
try:
|
||||
templates = template_service.list_templates()
|
||||
categories = list(set(t.get("category", "other") for t in templates))
|
||||
return sorted(categories)
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to get categories: {str(e)}")
|
||||
|
||||
@router.get("/tags", response_model=List[str])
|
||||
async def get_template_tags(
|
||||
template_service: ProjectTemplateService = Depends(get_template_service)
|
||||
):
|
||||
"""Get all available template tags."""
|
||||
try:
|
||||
templates = template_service.list_templates()
|
||||
all_tags = []
|
||||
for template in templates:
|
||||
all_tags.extend(template.get("tags", []))
|
||||
|
||||
# Remove duplicates and sort
|
||||
unique_tags = sorted(list(set(all_tags)))
|
||||
return unique_tags
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to get tags: {str(e)}")
|
||||
|
||||
@router.get("/{template_id}/preview", response_model=Dict[str, Any])
|
||||
async def preview_template_files(
|
||||
template_id: str,
|
||||
file_path: Optional[str] = None,
|
||||
template_service: ProjectTemplateService = Depends(get_template_service)
|
||||
):
|
||||
"""Preview template files or get file structure."""
|
||||
try:
|
||||
template = template_service.get_template(template_id)
|
||||
if not template:
|
||||
raise HTTPException(status_code=404, detail=f"Template '{template_id}' not found")
|
||||
|
||||
if file_path:
|
||||
# Return specific file content
|
||||
starter_files = template["starter_files"]
|
||||
if file_path not in starter_files:
|
||||
raise HTTPException(status_code=404, detail=f"File '{file_path}' not found in template")
|
||||
|
||||
return {
|
||||
"file_path": file_path,
|
||||
"content": starter_files[file_path],
|
||||
"size": len(starter_files[file_path]),
|
||||
"type": "text" if file_path.endswith(('.txt', '.md', '.py', '.js', '.ts', '.json', '.yml', '.yaml')) else "binary"
|
||||
}
|
||||
else:
|
||||
# Return file structure overview
|
||||
starter_files = template["starter_files"]
|
||||
file_structure = {}
|
||||
|
||||
for file_path in sorted(starter_files.keys()):
|
||||
parts = Path(file_path).parts
|
||||
current = file_structure
|
||||
|
||||
for part in parts[:-1]:
|
||||
if part not in current:
|
||||
current[part] = {}
|
||||
current = current[part]
|
||||
|
||||
# Add file with metadata
|
||||
filename = parts[-1]
|
||||
current[filename] = {
|
||||
"type": "file",
|
||||
"size": len(starter_files[file_path]),
|
||||
"extension": Path(file_path).suffix
|
||||
}
|
||||
|
||||
return {
|
||||
"template_id": template_id,
|
||||
"file_structure": file_structure,
|
||||
"total_files": len(starter_files),
|
||||
"total_size": sum(len(content) for content in starter_files.values())
|
||||
}
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to preview template: {str(e)}")
|
||||
|
||||
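A usage sketch for the preview endpoint above. The template id and file path are illustrative; omitting file_path returns the nested file_structure overview instead of a single file:

    import requests

    BASE = "http://localhost:8000/api/templates"   # assumed host and mount point

    # Fetch one starter file's content
    one = requests.get(f"{BASE}/fastapi-service/preview", params={"file_path": "README.md"})
    print(one.json()["content"][:200])

    # Fetch the whole file-tree overview
    tree = requests.get(f"{BASE}/fastapi-service/preview")
    print(tree.json()["total_files"], tree.json()["total_size"])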
@router.post("/{template_id}/download")
|
||||
async def download_template_archive(
|
||||
template_id: str,
|
||||
template_service: ProjectTemplateService = Depends(get_template_service)
|
||||
):
|
||||
"""Download template as a ZIP archive."""
|
||||
try:
|
||||
template = template_service.get_template(template_id)
|
||||
if not template:
|
||||
raise HTTPException(status_code=404, detail=f"Template '{template_id}' not found")
|
||||
|
||||
# Create temporary ZIP file
|
||||
with tempfile.NamedTemporaryFile(suffix=".zip", delete=False) as temp_zip:
|
||||
import zipfile
|
||||
|
||||
with zipfile.ZipFile(temp_zip.name, 'w', zipfile.ZIP_DEFLATED) as zf:
|
||||
# Add template metadata
|
||||
zf.writestr("template.json", json.dumps(template["metadata"], indent=2))
|
||||
|
||||
# Add all starter files
|
||||
for file_path, content in template["starter_files"].items():
|
||||
zf.writestr(file_path, content)
|
||||
|
||||
# Return file for download
|
||||
from fastapi.responses import FileResponse
|
||||
return FileResponse(
|
||||
temp_zip.name,
|
||||
media_type="application/zip",
|
||||
filename=f"{template_id}-template.zip"
|
||||
)
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to download template: {str(e)}")
|
||||
|
||||
# Template Statistics and Analytics
|
||||
|
||||
@router.get("/stats/overview")
|
||||
async def get_template_statistics(
|
||||
template_service: ProjectTemplateService = Depends(get_template_service)
|
||||
):
|
||||
"""Get overview statistics about available templates."""
|
||||
try:
|
||||
templates = template_service.list_templates()
|
||||
|
||||
# Calculate statistics
|
||||
total_templates = len(templates)
|
||||
categories = {}
|
||||
difficulties = {}
|
||||
tech_stacks = {}
|
||||
|
||||
for template in templates:
|
||||
# Count categories
|
||||
category = template.get("category", "other")
|
||||
categories[category] = categories.get(category, 0) + 1
|
||||
|
||||
# Count difficulties
|
||||
difficulty = template.get("difficulty", "beginner")
|
||||
difficulties[difficulty] = difficulties.get(difficulty, 0) + 1
|
||||
|
||||
# Count tech stack components
|
||||
tech_stack = template.get("tech_stack", {})
|
||||
for category, technologies in tech_stack.items():
|
||||
for tech in technologies:
|
||||
tech_stacks[tech] = tech_stacks.get(tech, 0) + 1
|
||||
|
||||
# Get most popular technologies
|
||||
popular_tech = sorted(tech_stacks.items(), key=lambda x: x[1], reverse=True)[:10]
|
||||
|
||||
return {
|
||||
"total_templates": total_templates,
|
||||
"categories": categories,
|
||||
"difficulties": difficulties,
|
||||
"popular_technologies": dict(popular_tech),
|
||||
"average_features_per_template": sum(len(t.get("features", [])) for t in templates) / total_templates if templates else 0
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to get template statistics: {str(e)}")
|
||||
395
backend/app/api/ucxl_integration.py
Normal file
@@ -0,0 +1,395 @@
#!/usr/bin/env python3
"""
UCXL Integration API for WHOOSH
API endpoints for distributed artifact storage, retrieval, and temporal navigation
"""

from fastapi import APIRouter, HTTPException, Depends, Query, UploadFile, File
from typing import Dict, List, Optional, Any, Union
from pydantic import BaseModel, Field
from datetime import datetime

from ..services.ucxl_integration_service import ucxl_service, UCXLAddress
from ..core.auth_deps import get_current_user
from ..models.user import User

router = APIRouter(prefix="/api/ucxl", tags=["UCXL Integration"])

# Pydantic models for API requests/responses

class StoreArtifactRequest(BaseModel):
    project: str = Field(..., description="Project name")
    component: str = Field(..., description="Component name")
    path: str = Field(..., description="Artifact path")
    content: str = Field(..., description="Artifact content")
    content_type: str = Field("text/plain", description="Content MIME type")
    metadata: Optional[Dict[str, Any]] = Field(None, description="Additional metadata")

class StoreArtifactResponse(BaseModel):
    address: str
    success: bool
    message: str

class ArtifactInfo(BaseModel):
    address: str
    content_hash: str
    content_type: str
    size: int
    created_at: str
    modified_at: str
    metadata: Dict[str, Any]
    cached: Optional[bool] = None

class CreateProjectContextRequest(BaseModel):
    project_name: str = Field(..., description="Project name")
    description: str = Field(..., description="Project description")
    components: List[str] = Field(..., description="List of project components")
    metadata: Optional[Dict[str, Any]] = Field(None, description="Additional project metadata")

class LinkArtifactsRequest(BaseModel):
    source_address: str = Field(..., description="Source UCXL address")
    target_address: str = Field(..., description="Target UCXL address")
    relationship: str = Field(..., description="Relationship type (e.g., 'depends_on', 'implements', 'tests')")
    metadata: Optional[Dict[str, Any]] = Field(None, description="Link metadata")

class SystemStatusResponse(BaseModel):
    ucxl_endpoints: int
    dht_nodes: int
    bzzz_gateways: int
    cached_artifacts: int
    cache_limit: int
    system_health: float
    last_update: str

@router.get("/status", response_model=SystemStatusResponse)
async def get_ucxl_status(
    current_user: User = Depends(get_current_user)
) -> SystemStatusResponse:
    """Get UCXL integration system status"""
    try:
        status = await ucxl_service.get_system_status()
        return SystemStatusResponse(**status)
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get UCXL status: {str(e)}")

@router.post("/artifacts", response_model=StoreArtifactResponse)
async def store_artifact(
    request: StoreArtifactRequest,
    current_user: User = Depends(get_current_user)
) -> StoreArtifactResponse:
    """
    Store an artifact in the distributed UCXL system
    """
    try:
        address = await ucxl_service.store_artifact(
            project=request.project,
            component=request.component,
            path=request.path,
            content=request.content,
            content_type=request.content_type,
            metadata=request.metadata
        )

        if address:
            return StoreArtifactResponse(
                address=address,
                success=True,
                message="Artifact stored successfully"
            )
        else:
            raise HTTPException(status_code=500, detail="Failed to store artifact")

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to store artifact: {str(e)}")
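A rough sketch of storing an artifact through the endpoint above. The body fields come from StoreArtifactRequest; the host and bearer token are assumptions, since the route depends on get_current_user:

    import requests

    resp = requests.post(
        "http://localhost:8000/api/ucxl/artifacts",
        headers={"Authorization": "Bearer <token>"},   # assumed auth scheme
        json={
            "project": "whoosh",
            "component": "backend",
            "path": "docs/notes.md",
            "content": "# Design notes",
            "content_type": "text/markdown",
            "metadata": {"author": "example"},
        },
    )
    print(resp.json()["address"])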
@router.post("/artifacts/upload", response_model=StoreArtifactResponse)
|
||||
async def upload_artifact(
|
||||
project: str,
|
||||
component: str,
|
||||
path: str,
|
||||
file: UploadFile = File(...),
|
||||
metadata: Optional[str] = None,
|
||||
current_user: User = Depends(get_current_user)
|
||||
) -> StoreArtifactResponse:
|
||||
"""
|
||||
Upload and store a file artifact in the distributed UCXL system
|
||||
"""
|
||||
try:
|
||||
# Read file content
|
||||
content = await file.read()
|
||||
|
||||
# Parse metadata if provided
|
||||
file_metadata = {}
|
||||
if metadata:
|
||||
import json
|
||||
file_metadata = json.loads(metadata)
|
||||
|
||||
# Add file info to metadata
|
||||
file_metadata.update({
|
||||
"original_filename": file.filename,
|
||||
"uploaded_by": current_user.username,
|
||||
"upload_timestamp": datetime.utcnow().isoformat()
|
||||
})
|
||||
|
||||
address = await ucxl_service.store_artifact(
|
||||
project=project,
|
||||
component=component,
|
||||
path=path,
|
||||
content=content,
|
||||
content_type=file.content_type or "application/octet-stream",
|
||||
metadata=file_metadata
|
||||
)
|
||||
|
||||
if address:
|
||||
return StoreArtifactResponse(
|
||||
address=address,
|
||||
success=True,
|
||||
message=f"File '{file.filename}' uploaded successfully"
|
||||
)
|
||||
else:
|
||||
raise HTTPException(status_code=500, detail="Failed to upload file")
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to upload file: {str(e)}")
|
||||
|
||||
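A sketch of the multipart upload route above. Because project, component, path, and metadata are plain (non-body) parameters, FastAPI should expose them as query parameters, so this example passes them via params; file names and values are illustrative:

    import json
    import requests

    resp = requests.post(
        "http://localhost:8000/api/ucxl/artifacts/upload",
        headers={"Authorization": "Bearer <token>"},   # assumed auth scheme
        params={
            "project": "whoosh",
            "component": "backend",
            "path": "assets/logo.png",
            "metadata": json.dumps({"source": "design-team"}),
        },
        files={"file": ("logo.png", open("logo.png", "rb"), "image/png")},
    )
    print(resp.json()["message"])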
@router.get("/artifacts/{address:path}", response_model=Optional[ArtifactInfo])
|
||||
async def retrieve_artifact(
|
||||
address: str,
|
||||
current_user: User = Depends(get_current_user)
|
||||
) -> Optional[ArtifactInfo]:
|
||||
"""
|
||||
Retrieve an artifact from the distributed UCXL system
|
||||
"""
|
||||
try:
|
||||
# Decode URL-encoded address
|
||||
import urllib.parse
|
||||
decoded_address = urllib.parse.unquote(address)
|
||||
|
||||
data = await ucxl_service.retrieve_artifact(decoded_address)
|
||||
|
||||
if data:
|
||||
return ArtifactInfo(**data)
|
||||
else:
|
||||
raise HTTPException(status_code=404, detail=f"Artifact not found: {decoded_address}")
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to retrieve artifact: {str(e)}")
|
||||
|
||||
@router.get("/artifacts", response_model=List[ArtifactInfo])
|
||||
async def list_artifacts(
|
||||
project: Optional[str] = Query(None, description="Filter by project"),
|
||||
component: Optional[str] = Query(None, description="Filter by component"),
|
||||
limit: int = Query(100, ge=1, le=1000, description="Maximum number of artifacts to return"),
|
||||
current_user: User = Depends(get_current_user)
|
||||
) -> List[ArtifactInfo]:
|
||||
"""
|
||||
List artifacts from the distributed UCXL system
|
||||
"""
|
||||
try:
|
||||
artifacts = await ucxl_service.list_artifacts(
|
||||
project=project,
|
||||
component=component,
|
||||
limit=limit
|
||||
)
|
||||
|
||||
return [ArtifactInfo(**artifact) for artifact in artifacts]
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to list artifacts: {str(e)}")
|
||||
|
||||
@router.get("/artifacts/{address:path}/temporal", response_model=Optional[ArtifactInfo])
|
||||
async def resolve_temporal_artifact(
|
||||
address: str,
|
||||
timestamp: Optional[str] = Query(None, description="ISO timestamp for temporal resolution"),
|
||||
current_user: User = Depends(get_current_user)
|
||||
) -> Optional[ArtifactInfo]:
|
||||
"""
|
||||
Resolve a UCXL address at a specific point in time using temporal navigation
|
||||
"""
|
||||
try:
|
||||
# Decode URL-encoded address
|
||||
import urllib.parse
|
||||
decoded_address = urllib.parse.unquote(address)
|
||||
|
||||
# Parse timestamp if provided
|
||||
target_time = None
|
||||
if timestamp:
|
||||
target_time = datetime.fromisoformat(timestamp)
|
||||
|
||||
data = await ucxl_service.resolve_temporal_address(decoded_address, target_time)
|
||||
|
||||
if data:
|
||||
return ArtifactInfo(**data)
|
||||
else:
|
||||
raise HTTPException(status_code=404, detail=f"Artifact not found at specified time: {decoded_address}")
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to resolve temporal artifact: {str(e)}")
|
||||
|
||||
@router.post("/projects", response_model=Dict[str, str])
|
||||
async def create_project_context(
|
||||
request: CreateProjectContextRequest,
|
||||
current_user: User = Depends(get_current_user)
|
||||
) -> Dict[str, str]:
|
||||
"""
|
||||
Create a project context in the UCXL system
|
||||
"""
|
||||
try:
|
||||
address = await ucxl_service.create_project_context(
|
||||
project_name=request.project_name,
|
||||
description=request.description,
|
||||
components=request.components,
|
||||
metadata=request.metadata
|
||||
)
|
||||
|
||||
if address:
|
||||
return {
|
||||
"address": address,
|
||||
"project_name": request.project_name,
|
||||
"status": "created"
|
||||
}
|
||||
else:
|
||||
raise HTTPException(status_code=500, detail="Failed to create project context")
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to create project context: {str(e)}")
|
||||
|
||||
@router.post("/links", response_model=Dict[str, str])
|
||||
async def link_artifacts(
|
||||
request: LinkArtifactsRequest,
|
||||
current_user: User = Depends(get_current_user)
|
||||
) -> Dict[str, str]:
|
||||
"""
|
||||
Create a relationship link between two UCXL artifacts
|
||||
"""
|
||||
try:
|
||||
success = await ucxl_service.link_artifacts(
|
||||
source_address=request.source_address,
|
||||
target_address=request.target_address,
|
||||
relationship=request.relationship,
|
||||
metadata=request.metadata
|
||||
)
|
||||
|
||||
if success:
|
||||
return {
|
||||
"status": "linked",
|
||||
"source": request.source_address,
|
||||
"target": request.target_address,
|
||||
"relationship": request.relationship
|
||||
}
|
||||
else:
|
||||
raise HTTPException(status_code=500, detail="Failed to create artifact link")
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to link artifacts: {str(e)}")
|
||||
|
||||
@router.get("/artifacts/{address:path}/links", response_model=List[Dict[str, Any]])
|
||||
async def get_artifact_links(
|
||||
address: str,
|
||||
current_user: User = Depends(get_current_user)
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Get all links involving a specific artifact
|
||||
"""
|
||||
try:
|
||||
# Decode URL-encoded address
|
||||
import urllib.parse
|
||||
decoded_address = urllib.parse.unquote(address)
|
||||
|
||||
links = await ucxl_service.get_artifact_links(decoded_address)
|
||||
return links
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to get artifact links: {str(e)}")
|
||||
|
||||
@router.get("/addresses/parse", response_model=Dict[str, Any])
|
||||
async def parse_ucxl_address(
|
||||
address: str = Query(..., description="UCXL address to parse"),
|
||||
current_user: User = Depends(get_current_user)
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Parse a UCXL address into its components
|
||||
"""
|
||||
try:
|
||||
ucxl_addr = UCXLAddress.parse(address)
|
||||
|
||||
return {
|
||||
"original": address,
|
||||
"protocol": ucxl_addr.protocol.value,
|
||||
"user": ucxl_addr.user,
|
||||
"password": "***" if ucxl_addr.password else None, # Hide password
|
||||
"project": ucxl_addr.project,
|
||||
"component": ucxl_addr.component,
|
||||
"path": ucxl_addr.path,
|
||||
"reconstructed": ucxl_addr.to_string()
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=400, detail=f"Invalid UCXL address: {str(e)}")
|
||||
|
||||
@router.get("/addresses/generate", response_model=Dict[str, str])
|
||||
async def generate_ucxl_address(
|
||||
project: str = Query(..., description="Project name"),
|
||||
component: str = Query(..., description="Component name"),
|
||||
path: str = Query(..., description="Artifact path"),
|
||||
user: Optional[str] = Query(None, description="User name"),
|
||||
secure: bool = Query(False, description="Use secure protocol (ucxls)"),
|
||||
current_user: User = Depends(get_current_user)
|
||||
) -> Dict[str, str]:
|
||||
"""
|
||||
Generate a UCXL address from components
|
||||
"""
|
||||
try:
|
||||
from ..services.ucxl_integration_service import UCXLProtocol
|
||||
|
||||
ucxl_addr = UCXLAddress(
|
||||
protocol=UCXLProtocol.UCXL_SECURE if secure else UCXLProtocol.UCXL,
|
||||
user=user,
|
||||
project=project,
|
||||
component=component,
|
||||
path=path
|
||||
)
|
||||
|
||||
return {
|
||||
"address": ucxl_addr.to_string(),
|
||||
"project": project,
|
||||
"component": component,
|
||||
"path": path
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=400, detail=f"Failed to generate address: {str(e)}")
|
||||
|
||||
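A round-trip sketch for the two address helpers above: generate an address from its components, then feed it back to the parser. This avoids guessing the textual ucxl:// format, which lives in UCXLAddress; host and token are assumptions:

    import requests

    BASE = "http://localhost:8000/api/ucxl/addresses"
    headers = {"Authorization": "Bearer <token>"}   # assumed auth scheme

    # Build an address from project/component/path components
    gen = requests.get(f"{BASE}/generate", headers=headers, params={
        "project": "whoosh", "component": "backend", "path": "docs/notes.md", "secure": True,
    })
    address = gen.json()["address"]

    # Parse it back into its components
    parsed = requests.get(f"{BASE}/parse", headers=headers, params={"address": address})
    print(parsed.json()["protocol"], parsed.json()["reconstructed"])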
@router.get("/health")
|
||||
async def ucxl_health_check() -> Dict[str, Any]:
|
||||
"""UCXL integration health check endpoint"""
|
||||
try:
|
||||
status = await ucxl_service.get_system_status()
|
||||
|
||||
health_status = "healthy"
|
||||
if status.get("system_health", 0) < 0.5:
|
||||
health_status = "degraded"
|
||||
if status.get("dht_nodes", 0) == 0:
|
||||
health_status = "offline"
|
||||
|
||||
return {
|
||||
"status": health_status,
|
||||
"ucxl_endpoints": status.get("ucxl_endpoints", 0),
|
||||
"dht_nodes": status.get("dht_nodes", 0),
|
||||
"bzzz_gateways": status.get("bzzz_gateways", 0),
|
||||
"cached_artifacts": status.get("cached_artifacts", 0),
|
||||
"system_health": status.get("system_health", 0),
|
||||
"timestamp": datetime.utcnow().isoformat()
|
||||
}
|
||||
except Exception as e:
|
||||
return {
|
||||
"status": "error",
|
||||
"error": str(e),
|
||||
"timestamp": datetime.utcnow().isoformat()
|
||||
}
|
||||
|
||||
# Note: Exception handlers are registered at the app level, not router level
|
||||
@@ -1,8 +1,8 @@
"""
Hive API - Workflow Management Endpoints
WHOOSH API - Workflow Management Endpoints

This module provides comprehensive API endpoints for managing multi-agent workflows
in the Hive distributed orchestration platform. It handles workflow creation,
in the WHOOSH distributed orchestration platform. It handles workflow creation,
execution, monitoring, and lifecycle management.

Key Features:
@@ -28,7 +28,7 @@ from ..models.responses import (
from ..core.error_handlers import (
    coordinator_unavailable_error,
    validation_error,
    HiveAPIException
    WHOOSHAPIException
)
import uuid
from datetime import datetime
@@ -42,7 +42,7 @@ router = APIRouter()
    status_code=status.HTTP_200_OK,
    summary="List all workflows",
    description="""
    Retrieve a comprehensive list of all workflows in the Hive system.
    Retrieve a comprehensive list of all workflows in the WHOOSH system.

    This endpoint provides access to workflow definitions, templates, and metadata
    for building complex multi-agent orchestration pipelines.