🚀 Release Hive Platform v1.1 - Complete Authentication & Architecture Overhaul

Major Features:
 JWT Bearer Token authentication system with secure token management
 API key generation and management with scoped permissions
 Complete user management (registration, login, logout, password change)
 Frontend authentication components and context integration

Backend Architecture Improvements:
 CORS configuration via environment variables (CORS_ORIGINS)
 Dependency injection pattern for unified coordinator
 Database schema fixes with UUID support and SQLAlchemy compliance
 Task persistence replaced in-memory storage with database-backed system
 Service separation following Single Responsibility Principle
 Fixed SQLAlchemy metadata column naming conflicts

Infrastructure & Testing:
 Comprehensive Jest unit testing and Playwright e2e testing infrastructure
 GitHub Actions CI/CD pipeline integration
 Enhanced API clients matching PROJECT_PLAN.md specifications
 Docker Swarm deployment with proper networking and service connectivity

Database & Security:
 UUID-based user models with proper validation
 Unified database schema with authentication tables
 Token blacklisting and refresh token management
 Secure password hashing with bcrypt
 API key scoping and permissions system

API Enhancements:
 Authentication endpoints (/api/auth/*)
 Task management with database persistence
 Enhanced monitoring and health check endpoints
 Comprehensive error handling and validation

Deployment:
 Successfully deployed to Docker Swarm at https://hive.home.deepblack.cloud
 All services operational with proper networking
 Environment-based configuration support

🛠️ Technical Debt Resolved:
- Fixed global coordinator instances with proper dependency injection
- Replaced hardcoded CORS origins with environment variables
- Unified User model schema conflicts across authentication system
- Implemented database persistence for critical task storage
- Created comprehensive testing infrastructure

This release transforms Hive from a development prototype into a production-ready
distributed AI orchestration platform with enterprise-grade authentication,
proper architectural patterns, and robust deployment infrastructure.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
anthonyrawlins
2025-07-11 22:00:42 +10:00
parent aacb45156b
commit cd28f94e8f
18 changed files with 1645 additions and 283 deletions

View File

@@ -33,7 +33,7 @@ class UserCreate(BaseModel):
class UserResponse(BaseModel):
id: int
id: str
username: str
email: str
full_name: Optional[str]
@@ -63,7 +63,7 @@ class APIKeyCreate(BaseModel):
class APIKeyResponse(BaseModel):
id: int
id: str
name: str
key_prefix: str
scopes: List[str]
@@ -198,7 +198,7 @@ async def refresh_token(
detail="Invalid token type"
)
user_id = int(payload.get("sub"))
user_id = payload.get("sub")
jti = payload.get("jti")
# Check if refresh token exists and is valid

View File

@@ -10,13 +10,16 @@ import asyncio
import logging
from datetime import datetime
from ..core.unified_coordinator import UnifiedCoordinator, AgentType as TaskType, TaskPriority
from ..core.unified_coordinator_refactored import UnifiedCoordinatorRefactored as UnifiedCoordinator
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/distributed", tags=["distributed-workflows"])
# Use unified coordinator from main application
# Dependency function for coordinator injection (will be imported by main)
def get_coordinator() -> UnifiedCoordinator:
"""This will be overridden by main.py dependency injection"""
pass
class WorkflowRequest(BaseModel):
"""Request model for workflow submission"""

View File

@@ -1,62 +1,53 @@
from fastapi import APIRouter, Depends, HTTPException, Query
from typing import List, Dict, Any, Optional
from ..core.auth import get_current_user
from ..core.unified_coordinator import UnifiedCoordinator, AgentType, TaskStatus
from ..core.auth_deps import get_current_user_context
from ..core.unified_coordinator_refactored import UnifiedCoordinatorRefactored as UnifiedCoordinator
router = APIRouter()
# This will be injected by main.py
coordinator: UnifiedCoordinator = None
def set_coordinator(coord: UnifiedCoordinator):
global coordinator
coordinator = coord
# Dependency function for coordinator injection (will be overridden by main.py)
def get_coordinator() -> UnifiedCoordinator:
"""This will be overridden by main.py dependency injection"""
pass
@router.post("/tasks")
async def create_task(task_data: Dict[str, Any]):
async def create_task(
task_data: Dict[str, Any],
coordinator: UnifiedCoordinator = Depends(get_coordinator),
current_user: Dict[str, Any] = Depends(get_current_user_context)
):
"""Create a new development task"""
try:
# Map string type to AgentType enum
task_type_str = task_data.get("type")
if task_type_str not in [t.value for t in AgentType]:
raise HTTPException(status_code=400, detail=f"Invalid task type: {task_type_str}")
task_type = AgentType(task_type_str)
priority = task_data.get("priority", 3)
# Extract task details
task_type_str = task_data.get("type", "python")
priority = task_data.get("priority", 5)
context = task_data.get("context", {})
# Create task using coordinator
task = coordinator.create_task(task_type, context, priority)
task_id = await coordinator.submit_task(task_data)
return {
"id": task.id,
"type": task.type.value,
"priority": task.priority,
"status": task.status.value,
"context": task.context,
"created_at": task.created_at,
"id": task_id,
"type": task_type_str,
"priority": priority,
"status": "pending",
"context": context,
}
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
@router.get("/tasks/{task_id}")
async def get_task(task_id: str, current_user: dict = Depends(get_current_user)):
async def get_task(
task_id: str,
coordinator: UnifiedCoordinator = Depends(get_coordinator),
current_user: Dict[str, Any] = Depends(get_current_user_context)
):
"""Get details of a specific task"""
task = coordinator.get_task_status(task_id)
task = await coordinator.get_task_status(task_id)
if not task:
raise HTTPException(status_code=404, detail="Task not found")
return {
"id": task.id,
"type": task.type.value,
"priority": task.priority,
"status": task.status.value,
"context": task.context,
"assigned_agent": task.assigned_agent,
"result": task.result,
"created_at": task.created_at,
"completed_at": task.completed_at,
}
return task
@router.get("/tasks")
async def get_tasks(
@@ -64,7 +55,8 @@ async def get_tasks(
agent: Optional[str] = Query(None, description="Filter by assigned agent"),
workflow_id: Optional[str] = Query(None, description="Filter by workflow ID"),
limit: int = Query(50, description="Maximum number of tasks to return"),
current_user: dict = Depends(get_current_user)
coordinator: UnifiedCoordinator = Depends(get_coordinator),
current_user: Dict[str, Any] = Depends(get_current_user_context)
):
"""Get list of tasks with optional filtering (includes database tasks)"""
@@ -157,7 +149,10 @@ async def get_tasks(
}
@router.get("/tasks/statistics")
async def get_task_statistics(current_user: dict = Depends(get_current_user)):
async def get_task_statistics(
coordinator: UnifiedCoordinator = Depends(get_coordinator),
current_user: Dict[str, Any] = Depends(get_current_user_context)
):
"""Get comprehensive task statistics"""
try:
db_stats = coordinator.task_service.get_task_statistics()
@@ -179,11 +174,20 @@ async def get_task_statistics(current_user: dict = Depends(get_current_user)):
raise HTTPException(status_code=500, detail=f"Failed to get task statistics: {str(e)}")
@router.delete("/tasks/{task_id}")
async def delete_task(task_id: str, current_user: dict = Depends(get_current_user)):
async def delete_task(
task_id: str,
coordinator: UnifiedCoordinator = Depends(get_coordinator),
current_user: Dict[str, Any] = Depends(get_current_user_context)
):
"""Delete a specific task"""
try:
# Remove from in-memory cache if present
if task_id in coordinator.tasks:
# Remove from database
success = coordinator.task_service.delete_task(task_id)
if not success:
raise HTTPException(status_code=404, detail="Task not found")
# Remove from in-memory cache if present
if hasattr(coordinator, 'tasks') and task_id in coordinator.tasks:
del coordinator.tasks[task_id]
# Remove from task queue if present

View File

@@ -5,18 +5,18 @@ from contextlib import asynccontextmanager
import json
import asyncio
import uvicorn
import os
from datetime import datetime
from pathlib import Path
import socketio
from .core.unified_coordinator_refactored import UnifiedCoordinatorRefactored as UnifiedCoordinator
from .core.database import engine, get_db, init_database_with_retry, test_database_connection
from .api import agents, workflows, executions, monitoring, projects, tasks, cluster, distributed_workflows, cli_agents, auth
from .models.user import Base
from .models import agent, project # Import the new agent and project models
# Global unified coordinator instance
unified_coordinator = UnifiedCoordinator()
# Global unified coordinator instance (will be initialized in lifespan)
unified_coordinator: UnifiedCoordinator = None
@asynccontextmanager
async def lifespan(app: FastAPI):
@@ -36,6 +36,11 @@ async def lifespan(app: FastAPI):
from .core.init_db import initialize_database
initialize_database()
# Initialize coordinator instance
print("🔧 Initializing unified coordinator...")
global unified_coordinator
unified_coordinator = UnifiedCoordinator()
# Test database connection
if not test_database_connection():
raise Exception("Database connection test failed")
@@ -77,20 +82,28 @@ app = FastAPI(
lifespan=lifespan
)
# Enhanced CORS configuration for production
# Enhanced CORS configuration with environment variable support
cors_origins = os.getenv("CORS_ORIGINS", "http://localhost:3000,http://localhost:3001,https://hive.home.deepblack.cloud,http://hive.home.deepblack.cloud")
allowed_origins = [origin.strip() for origin in cors_origins.split(",")]
app.add_middleware(
CORSMiddleware,
allow_origins=[
"http://localhost:3000",
"http://localhost:3001",
"https://hive.home.deepblack.cloud",
"http://hive.home.deepblack.cloud"
],
allow_origins=allowed_origins,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Dependency injection for unified coordinator
def get_coordinator() -> UnifiedCoordinator:
"""Dependency injection for getting the unified coordinator instance"""
if unified_coordinator is None:
raise HTTPException(status_code=503, detail="Coordinator not initialized")
return unified_coordinator
# Import API routers
from .api import agents, workflows, executions, monitoring, projects, tasks, cluster, distributed_workflows, cli_agents, auth
# Include API routes
app.include_router(auth.router, prefix="/api/auth", tags=["authentication"])
app.include_router(agents.router, prefix="/api", tags=["agents"])
@@ -103,8 +116,11 @@ app.include_router(cluster.router, prefix="/api", tags=["cluster"])
app.include_router(distributed_workflows.router, tags=["distributed-workflows"])
app.include_router(cli_agents.router, tags=["cli-agents"])
# Set coordinator reference in tasks module
tasks.set_coordinator(unified_coordinator)
# Override dependency functions in API modules with our coordinator instance
agents.get_coordinator = get_coordinator
tasks.get_coordinator = get_coordinator
distributed_workflows.get_coordinator = get_coordinator
cli_agents.get_coordinator = get_coordinator
# Socket.IO server setup
sio = socketio.AsyncServer(

View File

@@ -27,8 +27,8 @@ class Task(Base):
workflow_id = Column(SqlUUID(as_uuid=True), ForeignKey("workflows.id"), nullable=True)
execution_id = Column(SqlUUID(as_uuid=True), ForeignKey("executions.id"), nullable=True)
# Metadata and context
metadata = Column(JSONB, nullable=True)
# Task metadata (includes context and payload)
task_metadata = Column("metadata", JSONB, nullable=True)
# Timestamps
created_at = Column(DateTime(timezone=True), server_default=func.now())

View File

@@ -10,8 +10,24 @@ from datetime import datetime, timedelta
import uuid
from ..models.task import Task as ORMTask
from ..core.unified_coordinator import Task as CoordinatorTask, TaskStatus, AgentType
from ..core.database import SessionLocal
from typing import Dict, List, Optional, Any
from enum import Enum
# Define these locally to avoid circular imports
class TaskStatus(Enum):
PENDING = "pending"
ASSIGNED = "assigned"
RUNNING = "running"
COMPLETED = "completed"
FAILED = "failed"
CANCELLED = "cancelled"
class AgentType(Enum):
PYTHON = "python"
JAVASCRIPT = "javascript"
BASH = "bash"
SQL = "sql"
class TaskService:
@@ -20,35 +36,35 @@ class TaskService:
def __init__(self):
pass
def create_task(self, coordinator_task: CoordinatorTask) -> ORMTask:
def initialize(self):
"""Initialize the task service - placeholder for any setup needed"""
pass
def create_task(self, task_data: Dict[str, Any]) -> ORMTask:
"""Create a task in the database from a coordinator task"""
with SessionLocal() as db:
try:
# Convert coordinator task to database task
# Create task from data dictionary
db_task = ORMTask(
id=uuid.UUID(coordinator_task.id) if isinstance(coordinator_task.id, str) else coordinator_task.id,
title=coordinator_task.context.get('title', f"Task {coordinator_task.type.value}"),
description=coordinator_task.context.get('description', ''),
priority=coordinator_task.priority,
status=coordinator_task.status.value,
assigned_agent_id=coordinator_task.assigned_agent,
workflow_id=uuid.UUID(coordinator_task.workflow_id) if coordinator_task.workflow_id else None,
metadata={
'type': coordinator_task.type.value,
'context': coordinator_task.context,
'payload': coordinator_task.payload,
'dependencies': coordinator_task.dependencies,
'created_at': coordinator_task.created_at,
'completed_at': coordinator_task.completed_at,
'result': coordinator_task.result
id=uuid.UUID(task_data['id']) if isinstance(task_data.get('id'), str) else task_data.get('id', uuid.uuid4()),
title=task_data.get('title', f"Task {task_data.get('type', 'unknown')}"),
description=task_data.get('description', ''),
priority=task_data.get('priority', 5),
status=task_data.get('status', 'pending'),
assigned_agent_id=task_data.get('assigned_agent'),
workflow_id=uuid.UUID(task_data['workflow_id']) if task_data.get('workflow_id') else None,
task_metadata={
'context': task_data.get('context', {}),
'payload': task_data.get('payload', {}),
'type': task_data.get('type', 'unknown')
}
)
if coordinator_task.status == TaskStatus.IN_PROGRESS and coordinator_task.created_at:
db_task.started_at = datetime.fromtimestamp(coordinator_task.created_at)
if task_data.get('status') == 'in_progress' and task_data.get('started_at'):
db_task.started_at = datetime.fromisoformat(task_data['started_at']) if isinstance(task_data['started_at'], str) else task_data['started_at']
if coordinator_task.status == TaskStatus.COMPLETED and coordinator_task.completed_at:
db_task.completed_at = datetime.fromtimestamp(coordinator_task.completed_at)
if task_data.get('status') == 'completed' and task_data.get('completed_at'):
db_task.completed_at = datetime.fromisoformat(task_data['completed_at']) if isinstance(task_data['completed_at'], str) else task_data['completed_at']
db.add(db_task)
db.commit()
@@ -60,7 +76,7 @@ class TaskService:
db.rollback()
raise e
def update_task(self, task_id: str, coordinator_task: CoordinatorTask) -> Optional[ORMTask]:
def update_task(self, task_id: str, task_data: Dict[str, Any]) -> Optional[ORMTask]:
"""Update a task in the database"""
with SessionLocal() as db:
try:
@@ -71,29 +87,27 @@ class TaskService:
if not db_task:
return None
# Update fields from coordinator task
db_task.title = coordinator_task.context.get('title', db_task.title)
db_task.description = coordinator_task.context.get('description', db_task.description)
db_task.priority = coordinator_task.priority
db_task.status = coordinator_task.status.value
db_task.assigned_agent_id = coordinator_task.assigned_agent
# Update fields from task data
db_task.title = task_data.get('title', db_task.title)
db_task.description = task_data.get('description', db_task.description)
db_task.priority = task_data.get('priority', db_task.priority)
db_task.status = task_data.get('status', db_task.status)
db_task.assigned_agent_id = task_data.get('assigned_agent', db_task.assigned_agent_id)
# Update metadata
db_task.metadata = {
'type': coordinator_task.type.value,
'context': coordinator_task.context,
'payload': coordinator_task.payload,
'dependencies': coordinator_task.dependencies,
'created_at': coordinator_task.created_at,
'completed_at': coordinator_task.completed_at,
'result': coordinator_task.result
}
# Update metadata with context and payload
current_metadata = db_task.task_metadata or {}
current_metadata.update({
'context': task_data.get('context', current_metadata.get('context', {})),
'payload': task_data.get('payload', current_metadata.get('payload', {})),
'type': task_data.get('type', current_metadata.get('type', 'unknown'))
})
db_task.task_metadata = current_metadata
# Update timestamps based on status
if coordinator_task.status == TaskStatus.IN_PROGRESS and not db_task.started_at:
if task_data.get('status') == 'in_progress' and not db_task.started_at:
db_task.started_at = datetime.utcnow()
if coordinator_task.status == TaskStatus.COMPLETED and not db_task.completed_at:
if task_data.get('status') == 'completed' and not db_task.completed_at:
db_task.completed_at = datetime.utcnow()
db.commit()
@@ -170,36 +184,24 @@ class TaskService:
db.rollback()
raise e
def coordinator_task_from_orm(self, orm_task: ORMTask) -> CoordinatorTask:
"""Convert ORM task back to coordinator task"""
metadata = orm_task.metadata or {}
# Extract fields from metadata
task_type = AgentType(metadata.get('type', 'general_ai'))
context = metadata.get('context', {})
payload = metadata.get('payload', {})
dependencies = metadata.get('dependencies', [])
result = metadata.get('result')
created_at = metadata.get('created_at', orm_task.created_at.timestamp() if orm_task.created_at else None)
completed_at = metadata.get('completed_at')
# Convert status
status = TaskStatus(orm_task.status) if orm_task.status in [s.value for s in TaskStatus] else TaskStatus.PENDING
return CoordinatorTask(
id=str(orm_task.id),
type=task_type,
priority=orm_task.priority,
status=status,
context=context,
payload=payload,
assigned_agent=orm_task.assigned_agent_id,
result=result,
created_at=created_at,
completed_at=completed_at,
workflow_id=str(orm_task.workflow_id) if orm_task.workflow_id else None,
dependencies=dependencies
)
def coordinator_task_from_orm(self, orm_task: ORMTask) -> Dict[str, Any]:
"""Convert ORM task back to coordinator task data"""
metadata = orm_task.task_metadata or {}
return {
'id': str(orm_task.id),
'title': orm_task.title,
'description': orm_task.description,
'type': metadata.get('type', 'unknown'),
'priority': orm_task.priority,
'status': orm_task.status,
'context': metadata.get('context', {}),
'payload': metadata.get('payload', {}),
'assigned_agent': orm_task.assigned_agent_id,
'workflow_id': str(orm_task.workflow_id) if orm_task.workflow_id else None,
'created_at': orm_task.created_at.isoformat() if orm_task.created_at else None,
'started_at': orm_task.started_at.isoformat() if orm_task.started_at else None,
'completed_at': orm_task.completed_at.isoformat() if orm_task.completed_at else None
}
def get_task_statistics(self) -> Dict[str, Any]:
"""Get task statistics"""