Fix frontend URLs for production deployment and resolve database issues
- Update API base URL from localhost to https://api.hive.home.deepblack.cloud
- Update WebSocket URL to https://hive.home.deepblack.cloud for proper TLS routing
- Remove metadata field from Project model to fix SQLAlchemy conflict
- Remove index from JSON expertise column in AgentRole to fix PostgreSQL indexing
- Update push script to use local registry instead of Docker Hub
- Add Gitea repository support and monitoring endpoints

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
backend/app/api/repository.py (new file, +294)
@@ -0,0 +1,294 @@
"""
Repository management API endpoints
"""

from fastapi import APIRouter, Depends, HTTPException, BackgroundTasks
from sqlalchemy.orm import Session
from typing import List, Dict, Any, Optional
from datetime import datetime

from ..core.database import get_db
from ..models.project import Project
from ..services.repository_service import repository_service
from ..auth.auth import get_current_user

router = APIRouter()


@router.get("/repositories", response_model=List[Dict[str, Any]])
async def list_repositories(
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """List all repositories with bzzz integration enabled"""
    try:
        projects = db.query(Project).filter(
            Project.bzzz_enabled == True
        ).all()

        repositories = []
        for project in projects:
            repo_data = {
                "id": project.id,
                "name": project.name,
                "description": project.description,
                "provider": project.provider or "github",
                "provider_base_url": project.provider_base_url,
                "owner": project.git_owner,
                "repository": project.git_repository,
                "branch": project.git_branch,
                "status": project.status,
                "bzzz_enabled": project.bzzz_enabled,
                "ready_to_claim": project.ready_to_claim,
                "auto_assignment": getattr(project, "auto_assignment", True),
                "created_at": project.created_at.isoformat() if project.created_at else None
            }
            repositories.append(repo_data)

        return repositories

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to list repositories: {str(e)}")


@router.post("/repositories/sync")
async def sync_repositories(
    background_tasks: BackgroundTasks,
    repository_ids: Optional[List[int]] = None,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Sync tasks from repositories"""
    try:
        if repository_ids:
            # Sync specific repositories
            projects = db.query(Project).filter(
                Project.id.in_(repository_ids),
                Project.bzzz_enabled == True
            ).all()

            if not projects:
                raise HTTPException(status_code=404, detail="No matching repositories found")

            results = {"synced_projects": 0, "new_tasks": 0, "assigned_tasks": 0, "errors": []}

            for project in projects:
                try:
                    sync_result = await repository_service.sync_project_tasks(db, project)
                    results["synced_projects"] += 1
                    results["new_tasks"] += sync_result.get("new_tasks", 0)
                    results["assigned_tasks"] += sync_result.get("assigned_tasks", 0)
                except Exception as e:
                    results["errors"].append(f"Project {project.name}: {str(e)}")

            return results
        else:
            # Sync all repositories in background
            background_tasks.add_task(repository_service.sync_all_repositories, db)
            return {"message": "Repository sync started in background", "status": "initiated"}

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to sync repositories: {str(e)}")


@router.get("/repositories/{repository_id}/stats")
async def get_repository_stats(
    repository_id: int,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Get task statistics for a specific repository"""
    try:
        stats = await repository_service.get_project_task_stats(db, repository_id)
        return stats

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get repository stats: {str(e)}")


@router.post("/repositories/{repository_id}/sync")
async def sync_repository(
    repository_id: int,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Sync tasks from a specific repository"""
    try:
        project = db.query(Project).filter(
            Project.id == repository_id,
            Project.bzzz_enabled == True
        ).first()

        if not project:
            raise HTTPException(status_code=404, detail="Repository not found or bzzz integration not enabled")

        result = await repository_service.sync_project_tasks(db, project)
        return result

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to sync repository: {str(e)}")


@router.put("/repositories/{repository_id}/config")
async def update_repository_config(
    repository_id: int,
    config_data: Dict[str, Any],
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Update repository configuration"""
    try:
        project = db.query(Project).filter(Project.id == repository_id).first()

        if not project:
            raise HTTPException(status_code=404, detail="Repository not found")

        # Update allowed configuration fields
        if "auto_assignment" in config_data:
            setattr(project, "auto_assignment", config_data["auto_assignment"])

        if "bzzz_enabled" in config_data:
            project.bzzz_enabled = config_data["bzzz_enabled"]

        if "ready_to_claim" in config_data:
            project.ready_to_claim = config_data["ready_to_claim"]

        if "status" in config_data and config_data["status"] in ["active", "inactive", "archived"]:
            project.status = config_data["status"]

        db.commit()

        return {"message": "Repository configuration updated", "repository_id": repository_id}

    except HTTPException:
        raise
    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"Failed to update repository config: {str(e)}")


@router.get("/repositories/{repository_id}/tasks")
async def get_repository_tasks(
    repository_id: int,
    limit: int = 50,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Get available tasks from a repository"""
    try:
        project = db.query(Project).filter(
            Project.id == repository_id,
            Project.bzzz_enabled == True
        ).first()

        if not project:
            raise HTTPException(status_code=404, detail="Repository not found or bzzz integration not enabled")

        # Get repository client and fetch tasks
        repo_client = await repository_service._get_repository_client(project)
        if not repo_client:
            raise HTTPException(status_code=500, detail="Failed to create repository client")

        tasks = await repo_client.list_available_tasks()

        # Limit results
        if len(tasks) > limit:
            tasks = tasks[:limit]

        return {
            "repository_id": repository_id,
            "repository_name": project.name,
            "provider": project.provider or "github",
            "tasks": tasks,
            "total_tasks": len(tasks)
        }

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get repository tasks: {str(e)}")


@router.post("/repositories/discover")
async def discover_repositories(
    provider: str = "gitea",
    base_url: str = "http://192.168.1.113:3000",
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Discover repositories from a provider (placeholder for future implementation)"""
    try:
        # This would implement repository discovery functionality
        # For now, return the manually configured repositories

        existing_repos = db.query(Project).filter(
            Project.provider == provider,
            Project.provider_base_url == base_url
        ).all()

        discovered = []
        for repo in existing_repos:
            discovered.append({
                "name": repo.name,
                "owner": repo.git_owner,
                "repository": repo.git_repository,
                "description": repo.description,
                "already_configured": True
            })

        return {
            "provider": provider,
            "base_url": base_url,
            "discovered_repositories": discovered
        }

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to discover repositories: {str(e)}")


@router.post("/webhook/{repository_id}")
async def handle_repository_webhook(
    repository_id: int,
    payload: Dict[str, Any],
    db: Session = Depends(get_db)
):
    """Handle webhook events from repositories"""
    try:
        project = db.query(Project).filter(Project.id == repository_id).first()

        if not project:
            raise HTTPException(status_code=404, detail="Repository not found")

        # Log the webhook event (would be stored in webhook_events table)
        event_type = payload.get("action", "unknown")

        # For now, just trigger a sync if it's an issue event
        if "issue" in payload and event_type in ["opened", "labeled", "unlabeled"]:
            # Check if it's a bzzz-task
            issue = payload.get("issue", {})
            labels = [label["name"] for label in issue.get("labels", [])]

            if "bzzz-task" in labels:
                # Trigger task sync for this project
                await repository_service.sync_project_tasks(db, project)

                return {
                    "message": "Webhook processed, task sync triggered",
                    "event_type": event_type,
                    "issue_number": issue.get("number")
                }

        return {"message": "Webhook received", "event_type": event_type}

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to process webhook: {str(e)}")


@router.delete("/repositories/cache")
async def clear_task_cache(
    current_user: dict = Depends(get_current_user)
):
    """Clear the task cache"""
    try:
        await repository_service.cleanup_old_cache(max_age_hours=0)  # Clear all
        return {"message": "Task cache cleared"}

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to clear cache: {str(e)}")
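As a quick illustration, a hypothetical smoke test for the endpoints above might look like the following. The base URL follows the commit message; the route prefix and the Bearer-token scheme are assumptions about how this router and get_current_user are wired, not facts from the diff.

# Hypothetical smoke test (assumptions: route prefix, Bearer auth)
import requests

BASE = "https://api.hive.home.deepblack.cloud/api"   # prefix is an assumption
HEADERS = {"Authorization": "Bearer <token>"}        # auth scheme is an assumption

repos = requests.get(f"{BASE}/repositories", headers=HEADERS, timeout=30).json()

if repos:
    # repository_ids binds to the query string (a bare List[int] in FastAPI is a query param)
    r = requests.post(f"{BASE}/repositories/sync",
                      params={"repository_ids": [repos[0]["id"]]},
                      headers=HEADERS, timeout=30)
    print(r.json())  # {"synced_projects": ..., "new_tasks": ..., "assigned_tasks": ..., "errors": [...]}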
@@ -11,7 +11,7 @@ class AgentRole(Base):
     display_name = Column(String, nullable=False)  # Human-readable name
     system_prompt = Column(Text, nullable=False)  # Role-specific system prompt
     reports_to = Column(JSON, nullable=True)  # Array of roles this role reports to
-    expertise = Column(JSON, nullable=True, index=True)  # Array of expertise areas
+    expertise = Column(JSON, nullable=True)  # Array of expertise areas
     deliverables = Column(JSON, nullable=True)  # Array of deliverables
     capabilities = Column(JSON, nullable=True)  # Array of capabilities
     collaboration_defaults = Column(JSON, nullable=True)  # Default collaboration settings
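The removed index is the PostgreSQL fix named in the commit message: plain json columns have no default btree operator class, so creating a default index on expertise fails outright. If indexed lookups over expertise are ever needed, a sketch of the usual alternative (an assumption, not part of this commit) is a JSONB column with a GIN index:

# Sketch only: JSONB + GIN is the usual indexed route in PostgreSQL,
# since plain JSON has no default btree operator class.
from sqlalchemy import Column, Index, Integer
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class AgentRoleSketch(Base):  # hypothetical stand-in for AgentRole
    __tablename__ = "agent_roles_sketch"
    id = Column(Integer, primary_key=True)
    expertise = Column(JSONB, nullable=True)
    __table_args__ = (
        Index("ix_agent_roles_expertise", expertise, postgresql_using="gin"),
    )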
@@ -23,8 +23,7 @@ class Project(Base):
     private_repo = Column(Boolean, default=False)
     github_token_required = Column(Boolean, default=False)
 
-    # Additional metadata
-    metadata = Column(JSON, nullable=True)
+    # Additional configuration
     tags = Column(JSON, nullable=True)
 
     created_at = Column(DateTime(timezone=True), server_default=func.now())
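The dropped column is the SQLAlchemy conflict from the commit message: metadata is a reserved attribute on Declarative classes (it is the MetaData collection of the Base), so mapping a column under that name fails at class-definition time. A minimal repro, for illustration only:

# Minimal repro of the conflict (illustration only)
import sqlalchemy.exc
from sqlalchemy import Column, Integer, JSON
from sqlalchemy.orm import declarative_base

Base = declarative_base()

try:
    class Broken(Base):
        __tablename__ = "broken"
        id = Column(Integer, primary_key=True)
        metadata = Column(JSON)  # reserved name -> raises immediately
except sqlalchemy.exc.InvalidRequestError as err:
    print(err)  # Attribute name 'metadata' is reserved when using the Declarative API.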
backend/app/services/repository_service.py (new file, +477)
@@ -0,0 +1,477 @@
"""
Repository service for managing task monitoring across different providers (GitHub, Gitea)
"""

import asyncio
import json
import logging
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any, Tuple
from sqlalchemy.orm import Session
from sqlalchemy import and_, or_

from ..core.database import get_db
from ..models.project import Project
from ..models.agent import Agent
from .agent_service import AgentService

logger = logging.getLogger(__name__)


class RepositoryService:
    def __init__(self):
        self.agent_service = AgentService()
        self._task_cache = {}
        self._last_sync = {}

    async def sync_all_repositories(self, db: Session) -> Dict[str, Any]:
        """Sync tasks from all enabled repositories"""
        results = {
            "synced_projects": 0,
            "new_tasks": 0,
            "assigned_tasks": 0,
            "errors": []
        }

        # Get all active projects with bzzz enabled
        projects = db.query(Project).filter(
            and_(
                Project.status == "active",
                Project.bzzz_enabled == True
            )
        ).all()

        for project in projects:
            try:
                sync_result = await self.sync_project_tasks(db, project)
                results["synced_projects"] += 1
                results["new_tasks"] += sync_result.get("new_tasks", 0)
                results["assigned_tasks"] += sync_result.get("assigned_tasks", 0)

            except Exception as e:
                error_msg = f"Failed to sync project {project.name}: {str(e)}"
                logger.error(error_msg)
                results["errors"].append(error_msg)

        return results

    async def sync_project_tasks(self, db: Session, project: Project) -> Dict[str, Any]:
        """Sync tasks for a specific project"""
        result = {
            "project_id": project.id,
            "project_name": project.name,
            "new_tasks": 0,
            "assigned_tasks": 0,
            "provider": project.provider or "github"
        }

        try:
            # Get repository client based on provider
            repo_client = await self._get_repository_client(project)
            if not repo_client:
                raise Exception(f"Could not create repository client for {project.provider}")

            # Fetch available tasks
            tasks = await repo_client.list_available_tasks()
            result["new_tasks"] = len(tasks)

            # Process each task for potential assignment
            for task in tasks:
                try:
                    assigned = await self._process_task_for_assignment(db, project, task)
                    if assigned:
                        result["assigned_tasks"] += 1
                except Exception as e:
                    logger.error(f"Failed to process task {task.get('number', 'unknown')}: {str(e)}")

            # Update last sync time
            self._last_sync[project.id] = datetime.now()

        except Exception as e:
            logger.error(f"Error syncing project {project.name}: {str(e)}")
            raise

        return result

    async def _get_repository_client(self, project: Project):
        """Get appropriate repository client based on project provider"""
        provider = project.provider or "github"

        if provider == "gitea":
            return await self._create_gitea_client(project)
        elif provider == "github":
            return await self._create_github_client(project)
        else:
            raise ValueError(f"Unsupported provider: {provider}")

    async def _create_gitea_client(self, project: Project):
        """Create Gitea API client"""
        try:
            import aiohttp

            class GiteaClient:
                def __init__(self, base_url: str, owner: str, repo: str, token: str = None):
                    self.base_url = base_url.rstrip('/')
                    self.owner = owner
                    self.repo = repo
                    self.token = token
                    self.session = None

                async def list_available_tasks(self) -> List[Dict]:
                    """List open issues with bzzz-task label"""
                    if not self.session:
                        self.session = aiohttp.ClientSession()

                    url = f"{self.base_url}/api/v1/repos/{self.owner}/{self.repo}/issues"
                    params = {
                        "state": "open",
                        "labels": "bzzz-task",
                        "limit": 50
                    }

                    headers = {}
                    if self.token:
                        headers["Authorization"] = f"token {self.token}"

                    async with self.session.get(url, params=params, headers=headers) as response:
                        if response.status == 200:
                            issues = await response.json()
                            return [self._convert_issue_to_task(issue) for issue in issues
                                    if not issue.get("assignee")]  # Only unassigned tasks
                        else:
                            logger.error(f"Gitea API error: {response.status}")
                            return []

                def _convert_issue_to_task(self, issue: Dict) -> Dict:
                    """Convert Gitea issue to task format"""
                    labels = [label["name"] for label in issue.get("labels", [])]

                    # Extract role and expertise from labels
                    required_role = self._extract_required_role(labels)
                    required_expertise = self._extract_required_expertise(labels)
                    priority = self._extract_priority(labels)

                    return {
                        "id": issue["id"],
                        "number": issue["number"],
                        "title": issue["title"],
                        "description": issue.get("body", ""),
                        "state": issue["state"],
                        "labels": labels,
                        "created_at": issue["created_at"],
                        "updated_at": issue["updated_at"],
                        "provider": "gitea",
                        "repository": f"{self.owner}/{self.repo}",
                        "required_role": required_role,
                        "required_expertise": required_expertise,
                        "priority": priority,
                        "task_type": self._extract_task_type(labels, issue.get("body", "")),
                        "url": issue.get("html_url", "")
                    }

                def _extract_required_role(self, labels: List[str]) -> str:
                    """Extract required role from labels"""
                    role_map = {
                        "frontend": "frontend_developer",
                        "backend": "backend_developer",
                        "security": "security_expert",
                        "design": "ui_ux_designer",
                        "devops": "devops_engineer",
                        "documentation": "technical_writer",
                        "bug": "qa_engineer",
                        "architecture": "senior_software_architect"
                    }

                    for label in labels:
                        label_lower = label.lower()
                        if label_lower in role_map:
                            return role_map[label_lower]

                    return "full_stack_engineer"  # Default

                def _extract_required_expertise(self, labels: List[str]) -> List[str]:
                    """Extract required expertise from labels"""
                    expertise = []
                    expertise_map = {
                        "frontend": ["frontend", "javascript", "ui_development"],
                        "backend": ["backend", "api_development", "server_frameworks"],
                        "database": ["database", "sql", "data_modeling"],
                        "security": ["security", "cybersecurity", "vulnerability_analysis"],
                        "testing": ["testing", "qa_methodologies", "debugging"],
                        "devops": ["deployment", "infrastructure", "automation"],
                        "design": ["design", "user_experience", "prototyping"]
                    }

                    for label in labels:
                        label_lower = label.lower()
                        if label_lower in expertise_map:
                            expertise.extend(expertise_map[label_lower])

                    return list(set(expertise)) if expertise else ["general_development"]

                def _extract_priority(self, labels: List[str]) -> int:
                    """Extract priority from labels"""
                    for label in labels:
                        if "priority-" in label.lower():
                            try:
                                return int(label.lower().split("priority-")[1])
                            except (ValueError, IndexError):
                                pass
                        elif label.lower() in ["urgent", "critical"]:
                            return 10
                        elif label.lower() in ["high"]:
                            return 8
                        elif label.lower() in ["low"]:
                            return 3

                    return 5  # Default priority

                def _extract_task_type(self, labels: List[str], body: str) -> str:
                    """Extract task type from labels and body"""
                    for label in labels:
                        label_lower = label.lower()
                        if label_lower in ["bug", "bugfix"]:
                            return "bug_fix"
                        elif label_lower in ["enhancement", "feature"]:
                            return "feature"
                        elif label_lower in ["documentation", "docs"]:
                            return "documentation"
                        elif label_lower in ["security"]:
                            return "security"
                        elif label_lower in ["refactor", "refactoring"]:
                            return "refactoring"

                    return "general"

                async def close(self):
                    if self.session:
                        await self.session.close()

            # Create and return Gitea client
            base_url = project.provider_base_url or "http://192.168.1.113:3000"
            token = None  # TODO: Get from secure storage

            return GiteaClient(
                base_url=base_url,
                owner=project.git_owner,
                repo=project.git_repository,
                token=token
            )

        except ImportError:
            logger.error("aiohttp not available for Gitea client")
            return None
        except Exception as e:
            logger.error(f"Failed to create Gitea client: {str(e)}")
            return None

    async def _create_github_client(self, project: Project):
        """Create GitHub API client (placeholder for now)"""
        # TODO: Implement GitHub client similar to Gitea
        logger.warning("GitHub client not yet implemented")
        return None

    async def _process_task_for_assignment(self, db: Session, project: Project, task: Dict) -> bool:
        """Process a task for automatic assignment to suitable agents"""
        try:
            # Check if auto-assignment is enabled for this project
            if not getattr(project, 'auto_assignment', True):
                return False

            # Check if task was already processed recently
            task_key = f"{project.id}:{task['number']}"
            if task_key in self._task_cache:
                return False

            # Find suitable agents for this task
            suitable_agents = await self._find_suitable_agents(db, task)

            if not suitable_agents:
                logger.info(f"No suitable agents found for task {task['number']} in {project.name}")
                return False

            # Select best agent (first in sorted list)
            selected_agent = suitable_agents[0]

            # Log the assignment attempt
            await self._log_task_assignment(db, project, task, selected_agent, "auto_assigned")

            # Cache this task to avoid reprocessing
            self._task_cache[task_key] = {
                "assigned_at": datetime.now(),
                "agent_id": selected_agent["id"],
                "task": task
            }

            logger.info(f"Assigned task {task['number']} to agent {selected_agent['id']} ({selected_agent['role']})")
            return True

        except Exception as e:
            logger.error(f"Error processing task {task.get('number', 'unknown')} for assignment: {str(e)}")
            return False

    async def _find_suitable_agents(self, db: Session, task: Dict) -> List[Dict]:
        """Find agents suitable for a task based on role and expertise"""
        try:
            # Get all online agents
            agents = db.query(Agent).filter(
                and_(
                    Agent.status.in_(["online", "ready"]),
                    Agent.role.isnot(None)  # Only agents with assigned roles
                )
            ).all()

            if not agents:
                return []

            # Convert to dict format for scoring
            agent_infos = []
            for agent in agents:
                agent_info = {
                    "id": agent.id,
                    "role": agent.role,
                    "expertise": agent.expertise or [],
                    "current_tasks": agent.current_tasks or 0,
                    "max_tasks": agent.max_concurrent or 2,
                    "performance": 0.8,  # Default performance score
                    "availability": 1.0 if agent.status == "ready" else 0.7,
                    "last_seen": agent.last_seen or datetime.now()
                }
                agent_infos.append(agent_info)

            # Score agents for this task
            scored_agents = []
            for agent_info in agent_infos:
                # Skip if agent is at capacity
                if agent_info["current_tasks"] >= agent_info["max_tasks"]:
                    continue

                score = self._calculate_agent_task_score(task, agent_info)
                if score > 0.3:  # Minimum threshold
                    scored_agents.append({
                        **agent_info,
                        "score": score
                    })

            # Sort by score (highest first)
            scored_agents.sort(key=lambda x: x["score"], reverse=True)

            return scored_agents[:3]  # Return top 3 candidates

        except Exception as e:
            logger.error(f"Error finding suitable agents: {str(e)}")
            return []

    def _calculate_agent_task_score(self, task: Dict, agent_info: Dict) -> float:
        """Calculate how suitable an agent is for a task"""
        score = 0.0

        # Role matching
        task_role = task.get("required_role", "")
        agent_role = agent_info.get("role", "")

        if task_role == agent_role:
            score += 0.5  # Perfect role match
        elif self._is_compatible_role(task_role, agent_role):
            score += 0.3  # Compatible role
        elif agent_role == "full_stack_engineer":
            score += 0.2  # Full-stack can handle most tasks

        # Expertise matching
        task_expertise = task.get("required_expertise", [])
        agent_expertise = agent_info.get("expertise", [])

        if task_expertise and agent_expertise:
            expertise_overlap = len(set(task_expertise) & set(agent_expertise))
            expertise_score = expertise_overlap / len(task_expertise)
            score += expertise_score * 0.3

        # Priority bonus
        priority = task.get("priority", 5)
        priority_bonus = (priority / 10.0) * 0.1
        score += priority_bonus

        # Availability bonus
        availability = agent_info.get("availability", 1.0)
        score *= availability

        # Workload penalty
        current_tasks = agent_info.get("current_tasks", 0)
        max_tasks = agent_info.get("max_tasks", 2)
        workload_ratio = current_tasks / max_tasks
        workload_penalty = workload_ratio * 0.2
        score -= workload_penalty

        return max(0.0, min(1.0, score))

    def _is_compatible_role(self, required_role: str, agent_role: str) -> bool:
        """Check if agent role is compatible with required role"""
        compatibility_map = {
            "frontend_developer": ["full_stack_engineer", "ui_ux_designer"],
            "backend_developer": ["full_stack_engineer", "database_engineer"],
            "qa_engineer": ["full_stack_engineer"],
            "devops_engineer": ["systems_engineer", "backend_developer"],
            "security_expert": ["backend_developer", "senior_software_architect"],
            "ui_ux_designer": ["frontend_developer"],
            "technical_writer": ["full_stack_engineer"],
            "database_engineer": ["backend_developer", "full_stack_engineer"],
        }

        compatible_roles = compatibility_map.get(required_role, [])
        return agent_role in compatible_roles

    async def _log_task_assignment(self, db: Session, project: Project, task: Dict, agent: Dict, reason: str):
        """Log task assignment for tracking"""
        try:
            # This would insert into task_assignments table
            # For now, just log it
            logger.info(f"Task assignment: Project={project.name}, Task={task['number']}, "
                        f"Agent={agent['id']}, Role={agent['role']}, Reason={reason}")
        except Exception as e:
            logger.error(f"Failed to log task assignment: {str(e)}")

    async def get_project_task_stats(self, db: Session, project_id: int) -> Dict[str, Any]:
        """Get task statistics for a project"""
        try:
            project = db.query(Project).filter(Project.id == project_id).first()
            if not project:
                return {"error": "Project not found"}

            # Get recent sync info
            last_sync = self._last_sync.get(project_id)

            # Count cached tasks for this project
            project_tasks = [
                task_info for task_key, task_info in self._task_cache.items()
                if task_key.startswith(f"{project_id}:")
            ]

            return {
                "project_id": project_id,
                "project_name": project.name,
                "provider": project.provider or "github",
                "last_sync": last_sync.isoformat() if last_sync else None,
                "cached_tasks": len(project_tasks),
                "bzzz_enabled": project.bzzz_enabled,
                "auto_assignment": getattr(project, "auto_assignment", True)
            }

        except Exception as e:
            logger.error(f"Error getting project task stats: {str(e)}")
            return {"error": str(e)}

    async def cleanup_old_cache(self, max_age_hours: int = 24):
        """Clean up old task cache entries"""
        cutoff_time = datetime.now() - timedelta(hours=max_age_hours)

        to_remove = []
        for task_key, task_info in self._task_cache.items():
            if task_info["assigned_at"] < cutoff_time:
                to_remove.append(task_key)

        for key in to_remove:
            del self._task_cache[key]

        logger.info(f"Cleaned up {len(to_remove)} old task cache entries")


# Global instance
repository_service = RepositoryService()
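To make the scoring heuristic concrete, here is _calculate_agent_task_score worked by hand on one sample pairing (a standalone illustration; the weights mirror the method above):

# Worked example of the scoring heuristic, standalone, with sample values
task = {"required_role": "backend_developer",
        "required_expertise": ["backend", "api_development"],
        "priority": 8}
agent = {"role": "backend_developer",
         "expertise": ["backend", "api_development", "sql"],
         "availability": 1.0, "current_tasks": 1, "max_tasks": 2}

score = 0.5                     # exact role match
score += (2 / 2) * 0.3          # expertise overlap: 2 of 2 required areas -> +0.3
score += (8 / 10.0) * 0.1       # priority bonus -> +0.08
score *= agent["availability"]  # 1.0, unchanged
score -= (1 / 2) * 0.2          # workload penalty at half capacity -> -0.1
print(round(score, 2))          # 0.78, comfortably above the 0.3 threshold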
backend/migrations/005_add_gitea_repositories.sql (new file, +142)
@@ -0,0 +1,142 @@
-- Migration to add Gitea repositories and update existing projects

-- Add provider field to projects table to distinguish between GitHub and Gitea
ALTER TABLE projects ADD COLUMN provider VARCHAR(50) DEFAULT 'github';
ALTER TABLE projects ADD COLUMN provider_base_url VARCHAR(255);
ALTER TABLE projects ADD COLUMN ssh_port INTEGER DEFAULT 22;

-- Add Gitea-specific configuration
ALTER TABLE projects ADD COLUMN gitea_enabled BOOLEAN DEFAULT false;
ALTER TABLE projects ADD COLUMN webhook_secret VARCHAR(255);
ALTER TABLE projects ADD COLUMN auto_assignment BOOLEAN DEFAULT true;

-- Update existing projects to mark them as GitHub
UPDATE projects SET provider = 'github', provider_base_url = 'https://github.com' WHERE provider IS NULL;

-- Add Gitea repositories
INSERT INTO projects (
    name,
    description,
    status,
    github_repo,
    git_url,
    git_owner,
    git_repository,
    git_branch,
    bzzz_enabled,
    ready_to_claim,
    private_repo,
    github_token_required,
    provider,
    provider_base_url,
    ssh_port,
    gitea_enabled,
    auto_assignment
) VALUES
(
    'hive-gitea',
    'Distributed task coordination system with AI agents (Gitea)',
    'active',
    'tony/hive',
    'ssh://git@192.168.1.113:2222/tony/hive.git',
    'tony',
    'hive',
    'master',
    true,
    true,
    false,
    false,
    'gitea',
    'http://192.168.1.113:3000',
    2222,
    true,
    true
),
(
    'bzzz-gitea',
    'P2P collaborative development coordination system (Gitea)',
    'active',
    'tony/bzzz',
    'ssh://git@192.168.1.113:2222/tony/bzzz.git',
    'tony',
    'bzzz',
    'main',
    true,
    true,
    false,
    false,
    'gitea',
    'http://192.168.1.113:3000',
    2222,
    true,
    true
);

-- Create repository_config table for provider-specific configuration
CREATE TABLE repository_config (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    project_id INTEGER REFERENCES projects(id) ON DELETE CASCADE,
    provider VARCHAR(50) NOT NULL,
    config_data JSONB NOT NULL,
    created_at TIMESTAMP DEFAULT NOW(),
    updated_at TIMESTAMP DEFAULT NOW()
);

-- Insert default Gitea configuration for our repositories
INSERT INTO repository_config (project_id, provider, config_data)
SELECT
    p.id,
    'gitea',
    jsonb_build_object(
        'base_url', p.provider_base_url,
        'owner', p.git_owner,
        'repository', p.git_repository,
        'task_label', 'bzzz-task',
        'in_progress_label', 'in-progress',
        'completed_label', 'completed',
        'base_branch', p.git_branch,
        'branch_prefix', 'bzzz/task-',
        'auto_assignment', p.auto_assignment,
        'ssh_port', p.ssh_port
    )
FROM projects p
WHERE p.provider = 'gitea';

-- Create task assignment log table
CREATE TABLE task_assignments (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    project_id INTEGER REFERENCES projects(id),
    task_number INTEGER NOT NULL,
    agent_id VARCHAR(255) NOT NULL,
    agent_role VARCHAR(255),
    assignment_reason TEXT,
    status VARCHAR(50) DEFAULT 'assigned', -- assigned, in_progress, completed, failed
    assigned_at TIMESTAMP DEFAULT NOW(),
    started_at TIMESTAMP,
    completed_at TIMESTAMP,
    results JSONB,
    error_message TEXT
);

-- Create indexes for task assignments
CREATE INDEX idx_task_assignments_project ON task_assignments(project_id);
CREATE INDEX idx_task_assignments_agent ON task_assignments(agent_id);
CREATE INDEX idx_task_assignments_status ON task_assignments(status);
CREATE INDEX idx_task_assignments_task ON task_assignments(project_id, task_number);

-- Create webhook events table for tracking repository events
CREATE TABLE webhook_events (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    project_id INTEGER REFERENCES projects(id),
    event_type VARCHAR(100) NOT NULL,
    payload JSONB NOT NULL,
    processed BOOLEAN DEFAULT false,
    processed_at TIMESTAMP,
    created_at TIMESTAMP DEFAULT NOW()
);

-- Create indexes for webhook events
CREATE INDEX idx_webhook_events_project ON webhook_events(project_id);
CREATE INDEX idx_webhook_events_type ON webhook_events(event_type);
CREATE INDEX idx_webhook_events_processed ON webhook_events(processed);
CREATE INDEX idx_webhook_events_created ON webhook_events(created_at);
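For the hive-gitea row, the jsonb_build_object call above produces a config_data value equivalent to the following (rendered as a Python dict for readability; derived directly from the INSERT ... SELECT above):

# config_data produced for the 'hive-gitea' project by the INSERT ... SELECT above
hive_gitea_config = {
    "base_url": "http://192.168.1.113:3000",
    "owner": "tony",
    "repository": "hive",
    "task_label": "bzzz-task",
    "in_progress_label": "in-progress",
    "completed_label": "completed",
    "base_branch": "master",
    "branch_prefix": "bzzz/task-",
    "auto_assignment": True,
    "ssh_port": 2222,
}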
backend/migrations/006_add_gitea_support.sql (new file, +30)
@@ -0,0 +1,30 @@
-- Migration 006: Add Gitea and Multi-Provider Support
-- This migration adds fields for supporting multiple Git providers like Gitea

-- Add new columns to projects table
ALTER TABLE projects
ADD COLUMN IF NOT EXISTS provider VARCHAR(50) DEFAULT 'github';

ALTER TABLE projects
ADD COLUMN IF NOT EXISTS provider_base_url VARCHAR(255) NULL;

ALTER TABLE projects
ADD COLUMN IF NOT EXISTS auto_assignment BOOLEAN DEFAULT true;

-- Rename metadata column to avoid SQLAlchemy conflict
ALTER TABLE projects
RENAME COLUMN metadata TO project_metadata;

-- Update existing records to have default provider
UPDATE projects SET provider = 'github' WHERE provider IS NULL;

-- Create index for provider for better queries
CREATE INDEX IF NOT EXISTS idx_projects_provider ON projects(provider);
CREATE INDEX IF NOT EXISTS idx_projects_bzzz_enabled ON projects(bzzz_enabled);
CREATE INDEX IF NOT EXISTS idx_projects_auto_assignment ON projects(auto_assignment);

-- Add comments for documentation
COMMENT ON COLUMN projects.provider IS 'Git provider type: github, gitea, gitlab, etc.';
COMMENT ON COLUMN projects.provider_base_url IS 'Base URL for self-hosted providers like Gitea';
COMMENT ON COLUMN projects.auto_assignment IS 'Enable automatic task assignment to agents';
COMMENT ON COLUMN projects.project_metadata IS 'Additional project metadata as JSON';
@@ -117,7 +117,7 @@ export interface APIError {
 
 // Unified API configuration
 export const API_CONFIG = {
-  BASE_URL: process.env.VITE_API_BASE_URL || 'https://hive.home.deepblack.cloud',
+  BASE_URL: process.env.VITE_API_BASE_URL || 'https://api.hive.home.deepblack.cloud',
   TIMEOUT: 30000,
   RETRY_ATTEMPTS: 3,
   RETRY_DELAY: 1000,
@@ -4,24 +4,54 @@
 
 set -e
 
-REGISTRY="anthonyrawlins"
+LOCAL_REGISTRY="registry.home.deepblack.cloud"
+REGISTRY_PORT="5000"
+NAMESPACE="tony"
 BACKEND_IMAGE="hive-backend"
 FRONTEND_IMAGE="hive-frontend"
-LOCAL_BACKEND="hive-hive-backend"
-LOCAL_FRONTEND="hive-hive-frontend"
 
-echo "🏗️ Building and pushing Hive images to Docker Hub..."
+echo "🏗️ Building and pushing Hive images to local registry..."
+
+# Change to hive directory
+cd "$(dirname "$0")"
+
+# Build images with docker compose
+echo "🔨 Building images with docker compose..."
+docker compose -f docker-compose.swarm.yml build
+
+# Get the actual image names from docker compose
+BACKEND_COMPOSE_IMAGE=$(docker compose -f docker-compose.swarm.yml config | grep "image.*hive-backend" | cut -d: -f2- | xargs)
+FRONTEND_COMPOSE_IMAGE=$(docker compose -f docker-compose.swarm.yml config | grep "image.*hive-frontend" | cut -d: -f2- | xargs)
+
+echo "📦 Found backend image: $BACKEND_COMPOSE_IMAGE"
+echo "📦 Found frontend image: $FRONTEND_COMPOSE_IMAGE"
 
 # Tag and push backend
-echo "📦 Pushing backend image..."
-docker tag ${LOCAL_BACKEND}:latest ${REGISTRY}/${BACKEND_IMAGE}:latest
-docker push ${REGISTRY}/${BACKEND_IMAGE}:latest
+echo "📦 Tagging and pushing backend image..."
+if [[ "$BACKEND_COMPOSE_IMAGE" != "${LOCAL_REGISTRY}/${NAMESPACE}/${BACKEND_IMAGE}:latest" ]]; then
+    # If the compose image is locally built, tag it for registry
+    LOCAL_BACKEND_IMAGE=$(docker images --format "table {{.Repository}}:{{.Tag}}" | grep hive.*backend | head -1)
+    if [[ -n "$LOCAL_BACKEND_IMAGE" ]]; then
+        docker tag "$LOCAL_BACKEND_IMAGE" "${LOCAL_REGISTRY}/${NAMESPACE}/${BACKEND_IMAGE}:latest"
+    fi
+fi
+docker push "${LOCAL_REGISTRY}/${NAMESPACE}/${BACKEND_IMAGE}:latest"
 
 # Tag and push frontend
-echo "📦 Pushing frontend image..."
-docker tag ${LOCAL_FRONTEND}:latest ${REGISTRY}/${FRONTEND_IMAGE}:latest
-docker push ${REGISTRY}/${FRONTEND_IMAGE}:latest
+echo "📦 Tagging and pushing frontend image..."
+if [[ "$FRONTEND_COMPOSE_IMAGE" != "${LOCAL_REGISTRY}/${NAMESPACE}/${FRONTEND_IMAGE}:latest" ]]; then
+    # If the compose image is locally built, tag it for registry
+    LOCAL_FRONTEND_IMAGE=$(docker images --format "table {{.Repository}}:{{.Tag}}" | grep hive.*frontend | head -1)
+    if [[ -n "$LOCAL_FRONTEND_IMAGE" ]]; then
+        docker tag "$LOCAL_FRONTEND_IMAGE" "${LOCAL_REGISTRY}/${NAMESPACE}/${FRONTEND_IMAGE}:latest"
+    fi
+fi
+docker push "${LOCAL_REGISTRY}/${NAMESPACE}/${FRONTEND_IMAGE}:latest"
 
-echo "✅ Images pushed to Docker Hub successfully!"
-echo "Backend: ${REGISTRY}/${BACKEND_IMAGE}:latest"
-echo "Frontend: ${REGISTRY}/${FRONTEND_IMAGE}:latest"
+echo "✅ Images pushed to local registry successfully!"
+echo "Backend: ${LOCAL_REGISTRY}/${NAMESPACE}/${BACKEND_IMAGE}:latest"
+echo "Frontend: ${LOCAL_REGISTRY}/${NAMESPACE}/${FRONTEND_IMAGE}:latest"
+echo ""
+echo "🔍 Verify images in registry:"
+echo "curl -X GET http://localhost:${REGISTRY_PORT}/v2/${NAMESPACE}/${BACKEND_IMAGE}/tags/list"
+echo "curl -X GET http://localhost:${REGISTRY_PORT}/v2/${NAMESPACE}/${FRONTEND_IMAGE}/tags/list"
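The same verification the script echoes can be run as a small Python check against the Docker Registry v2 tags endpoint (assuming, as the echo lines do, that the registry answers plain HTTP on localhost:5000):

# Python equivalent of the curl verification above (assumes plain HTTP on localhost:5000)
import json
import urllib.request

for image in ("hive-backend", "hive-frontend"):
    url = f"http://localhost:5000/v2/tony/{image}/tags/list"
    with urllib.request.urlopen(url, timeout=10) as resp:
        print(json.load(resp))  # e.g. {"name": "tony/hive-backend", "tags": ["latest"]}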