Major updates and improvements to BZZZ system
- Updated configuration and deployment files
- Improved system architecture and components
- Enhanced documentation and testing
- Fixed various issues and added new features

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
197
deployments/internal-docker/hcfs-base/scripts/entrypoint.sh
Normal file
197
deployments/internal-docker/hcfs-base/scripts/entrypoint.sh
Normal file
@@ -0,0 +1,197 @@
|
||||
#!/bin/bash
set -euo pipefail

# HCFS Agent Container Entrypoint

printf '%s\n' "🚀 Starting HCFS-enabled agent container..."

# Environment validation: fill in a default for anything the runtime
# did not provide (`:=` assigns when unset or empty, matching the old
# `VAR="${VAR:-default}"` form).
: "${AGENT_ID:=agent-$(hostname)}"
: "${TASK_ID:=task-$(date +%s)}"
: "${HCFS_API_URL:=http://host.docker.internal:8000}"
: "${HCFS_ENABLED:=true}"

printf '%s\n' \
    "📋 Container Configuration:" \
    " Agent ID: $AGENT_ID" \
    " Task ID: $TASK_ID" \
    " HCFS API: $HCFS_API_URL" \
    " HCFS Enabled: $HCFS_ENABLED"
|
||||
|
||||
# Function to wait for HCFS API
# Polls $HCFS_API_URL/health every 2s; returns 0 once it answers,
# 1 after 30 failed attempts.
wait_for_hcfs() {
    local -r retries=30
    local i

    echo "⏳ Waiting for HCFS API to be available..."

    for (( i = 1; i <= retries; i++ )); do
        if curl -s "$HCFS_API_URL/health" > /dev/null 2>&1; then
            echo "✅ HCFS API is available"
            return 0
        fi
        echo " Attempt $i/$retries - HCFS API not ready"
        sleep 2
    done

    echo "❌ HCFS API failed to become available after $retries attempts"
    return 1
}
|
||||
|
||||
# Function to initialize HCFS workspace
# Creates a per-run workspace context in HCFS and records its path in
# /tmp/hcfs-workspace-path. Returns 0 on success, 1 when the API call
# produced no response body.
init_hcfs_workspace() {
    echo "🔧 Initializing HCFS workspace..."

    # Epoch-seconds suffix keeps per-run workspace paths unique and sortable.
    local workspace_path="/agents/$AGENT_ID/workspaces/$(date +%s)"

    # Build the JSON payload.
    # NOTE(review): values are interpolated raw; IDs containing quotes or
    # backslashes would corrupt the JSON — assumed to be simple tokens.
    local context_data
    context_data=$(cat <<EOF
{
    "path": "$workspace_path",
    "content": "Agent workspace for container $(hostname)",
    "summary": "Agent $AGENT_ID workspace - Task $TASK_ID",
    "metadata": {
        "agent_id": "$AGENT_ID",
        "task_id": "$TASK_ID",
        "container_id": "$(hostname)",
        "created_at": "$(date -Iseconds)",
        "workspace_type": "agent_container"
    }
}
EOF
)

    # Create context via HCFS API. Declaration is split from assignment so
    # `local` cannot mask the command status (SC2155); a curl failure
    # yields an empty response instead of aborting under `set -e`.
    local response
    response=$(curl -s -X POST \
        -H "Content-Type: application/json" \
        -d "$context_data" \
        "$HCFS_API_URL/contexts" || true)

    if [ -n "$response" ]; then
        echo "✅ HCFS workspace context created: $workspace_path"
        echo "$workspace_path" > /tmp/hcfs-workspace-path
        return 0
    else
        echo "⚠️ Failed to create HCFS workspace context, using local storage"
        return 1
    fi
}
|
||||
|
||||
# Function to mount HCFS
# Prepares the local stand-in for an HCFS mount and records workspace
# metadata so later tooling (hcfs-workspace.sh) can source it.
# $1 - HCFS workspace context path.
mount_hcfs() {
    local ctx_path="$1"

    echo "🔗 Mounting HCFS workspace: $ctx_path"

    # No FUSE client is wired in yet; production would run:
    #   fusermount3 -o allow_other "$ctx_path" /mnt/hcfs
    mkdir -p /mnt/hcfs /home/agent/work/{src,build,output,logs}

    # Persist workspace metadata in shell-sourceable KEY=VALUE form.
    {
        printf 'HCFS_WORKSPACE_PATH=%s\n' "$ctx_path"
        printf 'HCFS_API_URL=%s\n' "$HCFS_API_URL"
        printf 'AGENT_ID=%s\n' "$AGENT_ID"
        printf 'TASK_ID=%s\n' "$TASK_ID"
        printf 'CREATED_AT=%s\n' "$(date -Iseconds)"
    } > /home/agent/work/.hcfs-workspace

    # Hand the tree to the unprivileged agent user.
    chown -R agent:agent /home/agent/work /mnt/hcfs

    echo "✅ HCFS workspace mounted and configured"
}
|
||||
|
||||
# Function to setup development environment
# Lays out the agent user's home directories, optional git identity,
# and an optional Python virtualenv (on by default).
setup_dev_environment() {
    echo "🛠️ Setting up development environment..."

    # Standard development directories, created as the agent user.
    sudo -u agent mkdir -p /home/agent/{.local/bin,.config,.cache,work/{src,tests,docs,scripts}}

    # Git identity is optional; configure only when both halves are given.
    if [[ -n "${GIT_USER_NAME:-}" && -n "${GIT_USER_EMAIL:-}" ]]; then
        sudo -u agent git config --global user.name "$GIT_USER_NAME"
        sudo -u agent git config --global user.email "$GIT_USER_EMAIL"
        echo "✅ Git configuration set: $GIT_USER_NAME <$GIT_USER_EMAIL>"
    fi

    # Opt out of the virtualenv with SETUP_PYTHON_VENV=false.
    if [[ "${SETUP_PYTHON_VENV:-true}" == "true" ]]; then
        sudo -u agent python3 -m venv /home/agent/.venv
        echo "✅ Python virtual environment created"
    fi

    echo "✅ Development environment ready"
}
|
||||
|
||||
# Function to start background services
# Launches long-running helpers. Currently only the HCFS sync daemon,
# and only when HCFS is enabled and a workspace was provisioned.
start_background_services() {
    echo "🔄 Starting background services..."

    if [ "$HCFS_ENABLED" = "true" ] && [ -f /tmp/hcfs-workspace-path ]; then
        /opt/hcfs/hcfs-workspace.sh daemon &
        # Record the daemon PID so shutdown hooks can signal/reap it;
        # previously the PID was discarded, leaving the daemon unmanaged.
        echo $! > /tmp/hcfs-daemon.pid
        echo "✅ HCFS workspace sync daemon started"
    fi
}
|
||||
|
||||
# Function to cleanup on exit
# Idempotent shutdown hook: flushes final workspace state to HCFS once.
# The guard matters because a signal trap followed by the EXIT trap
# would otherwise run the (non-idempotent) finalize step twice.
CLEANUP_DONE=0

cleanup() {
    if [ "$CLEANUP_DONE" = "1" ]; then
        return 0
    fi
    CLEANUP_DONE=1

    echo "🧹 Container cleanup initiated..."

    if [ "$HCFS_ENABLED" = "true" ] && [ -f /tmp/hcfs-workspace-path ]; then
        echo "💾 Storing final workspace state to HCFS..."
        /opt/hcfs/hcfs-workspace.sh finalize
    fi

    echo "✅ Cleanup completed"
}

# Set up signal handlers for graceful shutdown.
# The old `trap cleanup EXIT INT TERM` invoked cleanup twice on a
# signal (signal trap, then EXIT trap); signals now simply exit with
# the conventional 128+signo status, and EXIT does the cleanup.
trap cleanup EXIT
trap 'exit 130' INT
trap 'exit 143' TERM
|
||||
|
||||
# Main initialization sequence
# Orchestrates container start-up: HCFS attach (best effort),
# dev-environment setup, background services, then replaces this
# process with the requested command (or an interactive agent shell).
main() {
    echo "🏁 Starting HCFS Agent Container initialization..."

    # Every HCFS failure degrades to local storage instead of aborting
    # the container.
    if [ "$HCFS_ENABLED" = "true" ]; then
        if wait_for_hcfs; then
            if init_hcfs_workspace; then
                # Declaration split from assignment so a failed `cat`
                # is not masked by `local` (SC2155).
                local workspace_path
                workspace_path=$(cat /tmp/hcfs-workspace-path)
                mount_hcfs "$workspace_path"
            else
                echo "⚠️ HCFS workspace initialization failed, continuing with local storage"
            fi
        else
            echo "⚠️ HCFS API unavailable, continuing with local storage"
        fi
    else
        echo "ℹ️ HCFS disabled, using local storage only"
    fi

    # Set up development environment
    setup_dev_environment

    # Start background services
    start_background_services

    echo "🎉 HCFS Agent Container initialization complete!"
    echo "📁 Workspace: /home/agent/work"
    echo "🔧 Agent: $AGENT_ID"
    echo "📋 Task: $TASK_ID"

    # Execute the provided command or start interactive shell; `exec`
    # makes the command PID 1's replacement so signals reach it directly.
    if [ $# -eq 0 ]; then
        echo "🔧 Starting interactive shell..."
        exec sudo -u agent -i /bin/bash
    else
        echo "🚀 Executing command: $*"
        exec sudo -u agent "$@"
    fi
}

# Execute main function
main "$@"
|
||||
242
deployments/internal-docker/hcfs-base/scripts/hcfs-workspace.sh
Normal file
242
deployments/internal-docker/hcfs-base/scripts/hcfs-workspace.sh
Normal file
@@ -0,0 +1,242 @@
|
||||
#!/bin/bash
set -euo pipefail

# HCFS Workspace Management Script
# Handles workspace synchronization and artifact collection.
# Requires the .hcfs-workspace config written by the container
# entrypoint (provides HCFS_WORKSPACE_PATH, HCFS_API_URL, AGENT_ID,
# TASK_ID, CREATED_AT).

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
WORKSPACE_DIR="/home/agent/work"
HCFS_CONFIG="/home/agent/work/.hcfs-workspace"

# Abort early when the entrypoint never provisioned a workspace.
if [ ! -f "$HCFS_CONFIG" ]; then
    echo "⚠️ No HCFS workspace configuration found"
    exit 1
fi

# Load workspace configuration written by the entrypoint.
source "$HCFS_CONFIG"
|
||||
|
||||
# Logging function
# Echoes a timestamped line to stdout and best-effort appends it to the
# logfile. The previous `echo | tee -a /var/log/hcfs/workspace.log`
# failed the whole pipeline when the log directory was missing or
# unwritable, which killed the script under `set -euo pipefail` on the
# very first log call; file logging is now non-fatal.
log() {
    local line="[$(date +'%Y-%m-%d %H:%M:%S')] $1"
    printf '%s\n' "$line"
    printf '%s\n' "$line" 2>/dev/null >> /var/log/hcfs/workspace.log || true
}
|
||||
|
||||
# Function to store artifact in HCFS
# $1 - original file path (recorded in metadata only)
# $2 - artifact name (becomes the HCFS path leaf)
# $3 - artifact content (callers pass base64 so it is JSON-safe)
# Returns 0 when the API returned a body, 1 otherwise.
store_artifact() {
    local artifact_path="$1"
    local artifact_name="$2"
    local content="$3"

    local hcfs_artifact_path="${HCFS_WORKSPACE_PATH}/artifacts/${artifact_name}"

    # NOTE(review): fields are interpolated raw into the JSON; artifact
    # names containing quotes/backslashes would corrupt the payload —
    # assumed to be plain relative paths.
    local artifact_data
    artifact_data=$(cat <<EOF
{
    "path": "$hcfs_artifact_path",
    "content": "$content",
    "summary": "Artifact: $artifact_name",
    "metadata": {
        "agent_id": "$AGENT_ID",
        "task_id": "$TASK_ID",
        "artifact_name": "$artifact_name",
        "artifact_type": "workspace_output",
        "file_path": "$artifact_path",
        "created_at": "$(date -Iseconds)"
    }
}
EOF
)

    # Declaration split from assignment so curl's status isn't masked by
    # `local` (SC2155); failures fall through to the empty-response check.
    local response
    response=$(curl -s -X POST \
        -H "Content-Type: application/json" \
        -d "$artifact_data" \
        "$HCFS_API_URL/contexts" || true)

    if [ -n "$response" ]; then
        log "✅ Stored artifact: $artifact_name -> $hcfs_artifact_path"
        return 0
    else
        log "❌ Failed to store artifact: $artifact_name"
        return 1
    fi
}
|
||||
|
||||
# Function to collect and store workspace artifacts
# Scans WORKSPACE_DIR for well-known artifact patterns and pushes each
# match (base64-encoded, capped below ~1MB encoded) to HCFS via
# store_artifact. Increments and logs a running count.
collect_artifacts() {
    log "📦 Collecting workspace artifacts..."

    local artifact_count=0

    # Basename patterns are matched with `find -name`; patterns that
    # contain '/' must use `find -path` instead — `-name` only matches
    # the basename and can never contain a slash, so entries like
    # "output/*" and "./**/README*" previously matched nothing.
    local artifact_patterns=(
        "*.log"
        "*.md"
        "*.txt"
        "*.json"
        "*.yaml"
        "*.yml"
        "output/*"
        "build/*.json"
        "build/*.xml"
        "results/*"
        "./**/README*"
        "./**/CHANGELOG*"
        "./**/requirements*.txt"
        "./**/package*.json"
        "./**/Cargo.toml"
        "./**/go.mod"
        "./**/pom.xml"
    )

    local pattern file relative_path content
    for pattern in "${artifact_patterns[@]}"; do
        local matcher=(-name "$pattern")
        case "$pattern" in
            */*) matcher=(-path "$WORKSPACE_DIR/${pattern#./}") ;;
        esac

        while IFS= read -r -d '' file; do
            if [ -f "$file" ] && [ -s "$file" ]; then
                # Quote the prefix so a workspace path containing glob
                # characters is stripped literally.
                relative_path="${file#"$WORKSPACE_DIR"/}"
                content=$(base64 -w 0 "$file" 2>/dev/null || echo "")

                # Skip unreadable files and anything >= 1MB encoded.
                if [ -n "$content" ] && [ ${#content} -lt 1000000 ]; then
                    if store_artifact "$relative_path" "$relative_path" "$content"; then
                        artifact_count=$((artifact_count + 1))
                    fi
                fi
            fi
        done < <(find "$WORKSPACE_DIR" "${matcher[@]}" -type f -print0 2>/dev/null || true)
    done

    log "✅ Collected $artifact_count artifacts"
}
|
||||
|
||||
# Function to update workspace status in HCFS
# $1 - status label, $2 - free-form message body.
# Best-effort: API errors are swallowed so status reporting can never
# take the workspace down.
update_workspace_status() {
    local state="$1"
    local body="$2"

    local payload
    payload=$(cat <<EOF
{
    "path": "${HCFS_WORKSPACE_PATH}/status",
    "content": "$body",
    "summary": "Workspace status: $state",
    "metadata": {
        "agent_id": "$AGENT_ID",
        "task_id": "$TASK_ID",
        "status": "$state",
        "timestamp": "$(date -Iseconds)",
        "hostname": "$(hostname)",
        "workspace_dir": "$WORKSPACE_DIR"
    }
}
EOF
)

    curl -s -X POST \
        -H "Content-Type: application/json" \
        -d "$payload" \
        "$HCFS_API_URL/contexts" > /dev/null || true

    log "📊 Updated workspace status: $state"
}
|
||||
|
||||
# Function to sync workspace changes
# Publishes a human-readable workspace summary as an "active" status.
sync_workspace() {
    log "🔄 Syncing workspace changes..."

    local file_count dir_count total_size
    file_count=$(find "$WORKSPACE_DIR" -type f 2>/dev/null | wc -l)
    dir_count=$(find "$WORKSPACE_DIR" -type d 2>/dev/null | wc -l)
    # The old `du | cut || echo 0` fallback never fired because `cut`
    # exits 0 even on empty input, leaving an empty size when du failed;
    # default the empty string explicitly instead.
    total_size=$(du -sb "$WORKSPACE_DIR" 2>/dev/null | cut -f1 || true)
    total_size=${total_size:-0}

    local summary
    summary=$(cat <<EOF
Workspace Summary ($(date -Iseconds)):
- Files: $file_count
- Directories: $dir_count
- Total Size: $total_size bytes
- Agent: $AGENT_ID
- Task: $TASK_ID
- Container: $(hostname)

Recent Activity:
$(ls -la "$WORKSPACE_DIR" 2>/dev/null | head -10 || echo "No files")
EOF
)

    update_workspace_status "active" "$summary"
}
|
||||
|
||||
# Function to finalize workspace
# Collects all artifacts, then publishes a "completed" status carrying
# a run summary (duration, final file listing, collected artifacts).
finalize_workspace() {
    log "🏁 Finalizing workspace..."

    # Collect artifacts first so the summary reflects them.
    collect_artifacts

    # `date -d` is GNU-only; the old fallback of `0` made the duration
    # "now minus the 1970 epoch" on parse failure. Fall back to "now"
    # so the duration degrades to 0 instead.
    local start_epoch
    start_epoch=$(date -d "$CREATED_AT" +%s 2>/dev/null || date +%s)

    # Pre-compute listings. The old `cmd | head || echo fallback` never
    # fired the fallback (head exits 0 on empty input), so empty results
    # are defaulted explicitly below.
    local file_listing artifact_listing
    file_listing=$(find "$WORKSPACE_DIR" -type f 2>/dev/null | head -20 || true)
    artifact_listing=$(ls "$WORKSPACE_DIR"/{output,build,logs,results}/* 2>/dev/null | head -10 || true)

    local completion_summary
    completion_summary=$(cat <<EOF
Workspace Completion Summary:
- Agent ID: $AGENT_ID
- Task ID: $TASK_ID
- Container: $(hostname)
- Started: $CREATED_AT
- Completed: $(date -Iseconds)
- Duration: $(( $(date +%s) - start_epoch )) seconds

Final Workspace Contents:
${file_listing:-No files}

Artifacts Collected:
${artifact_listing:-No artifacts}
EOF
)

    update_workspace_status "completed" "$completion_summary"
    log "✅ Workspace finalized"
}
|
||||
|
||||
# Daemon mode for continuous sync
# Loops forever, publishing a workspace sync roughly every 30s while
# sleeping in 5s slices so the process stays responsive to signals.
daemon_mode() {
    log "🔄 Starting HCFS workspace sync daemon..."

    local -r interval=30
    local last=0 now

    while :; do
        now=$(date +%s)
        if (( now - last >= interval )); then
            sync_workspace
            last=$now
        fi
        sleep 5
    done
}
|
||||
|
||||
# Main command dispatcher
# Defaults to the help text when no subcommand is given.
case "${1:-help}" in
    sync)
        sync_workspace
        ;;
    collect)
        collect_artifacts
        ;;
    finalize)
        finalize_workspace
        ;;
    daemon)
        daemon_mode
        ;;
    status)
        update_workspace_status "active" "Status check at $(date -Iseconds)"
        ;;
    help|*)
        cat <<EOF
HCFS Workspace Management Script

Usage: $0 {sync|collect|finalize|daemon|status|help}

Commands:
 sync - Sync current workspace state to HCFS
 collect - Collect and store artifacts in HCFS
 finalize - Finalize workspace and store all artifacts
 daemon - Run continuous sync daemon
 status - Update workspace status in HCFS
 help - Show this help message
EOF
        ;;
esac
|
||||
Reference in New Issue
Block a user