WIP: Save agent roles integration work before CHORUS rebrand
- Agent roles and coordination features
- Chat API integration testing
- New configuration and workspace management

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
131
docker/hcfs-base/Dockerfile
Normal file
131
docker/hcfs-base/Dockerfile
Normal file
@@ -0,0 +1,131 @@
|
||||
# HCFS Base Image - Production-ready environment with HCFS integration
FROM ubuntu:22.04

LABEL maintainer="anthony@deepblack.cloud"
LABEL description="HCFS-integrated base image for AI agent development environments"
LABEL version="1.0.0"

# Prevent interactive prompts during package installation.
# Kept as ENV (not ARG) deliberately: derived images also install packages.
ENV DEBIAN_FRONTEND=noninteractive
ENV TERM=xterm-256color

# Set up standard environment
ENV HCFS_WORKSPACE_ROOT=/workspace
ENV HCFS_MOUNT_POINT=/mnt/hcfs
ENV HCFS_API_URL=http://host.docker.internal:8000
ENV HCFS_ENABLED=true
ENV PYTHONPATH=/usr/local/lib/python3.10/site-packages:$PYTHONPATH

# Create agent user for sandboxed execution
RUN groupadd -r agent && useradd -r -g agent -d /home/agent -s /bin/bash agent

# Install system dependencies
RUN apt-get update && apt-get install -y \
    # Core system tools
    curl \
    wget \
    git \
    make \
    build-essential \
    software-properties-common \
    gnupg2 \
    lsb-release \
    ca-certificates \
    apt-transport-https \
    # sudo is required by the entrypoint (`sudo -u agent ...`) and the agent
    # sudoers rule configured below; it is NOT part of the ubuntu:22.04 base
    # image, so it must be installed explicitly.
    sudo \
    # Development essentials
    vim \
    nano \
    tree \
    jq \
    zip \
    unzip \
    rsync \
    tmux \
    screen \
    htop \
    # Network tools
    net-tools \
    iputils-ping \
    dnsutils \
    # Python and pip
    python3 \
    python3-pip \
    python3-dev \
    python3-venv \
    # FUSE for HCFS mounting
    fuse3 \
    libfuse3-dev \
    # Additional utilities
    sqlite3 \
    openssh-client \
    && rm -rf /var/lib/apt/lists/*

# Set up Python symlinks so `python` / `pip` resolve inside the container
RUN ln -sf /usr/bin/python3 /usr/bin/python && \
    ln -sf /usr/bin/pip3 /usr/bin/pip

# Install HCFS Python SDK and dependencies
RUN pip install --no-cache-dir \
    httpx \
    websockets \
    fastapi \
    uvicorn \
    pydantic \
    python-multipart \
    aiofiles \
    sentence-transformers \
    numpy \
    scipy \
    scikit-learn \
    requests \
    pyyaml \
    toml \
    click

# Create directory structure
RUN mkdir -p \
    /workspace \
    /mnt/hcfs \
    /home/agent \
    /home/agent/work \
    /home/agent/.local \
    /home/agent/.cache \
    /opt/hcfs \
    /etc/hcfs \
    /var/log/hcfs

# Set up HCFS integration scripts
COPY scripts/hcfs-init.sh /opt/hcfs/
COPY scripts/hcfs-mount.sh /opt/hcfs/
COPY scripts/hcfs-workspace.sh /opt/hcfs/
COPY scripts/entrypoint.sh /opt/hcfs/
COPY config/hcfs-agent.yaml /etc/hcfs/

# Make scripts executable
RUN chmod +x /opt/hcfs/*.sh

# Install HCFS client library (editable install so the source under
# /opt/hcfs/client stays the import target)
COPY hcfs-client /opt/hcfs/client
RUN cd /opt/hcfs/client && pip install -e .

# Set up agent workspace ownership and permissions
RUN chown -R agent:agent /home/agent /workspace /mnt/hcfs
RUN chmod 755 /home/agent /workspace

# Configure sudo for agent user (needed for FUSE mounts).
# NOTE(review): appending to /etc/sudoers directly works but
# /etc/sudoers.d/agent would be the more conventional location.
RUN echo "agent ALL=(ALL) NOPASSWD: /bin/mount, /bin/umount, /usr/bin/fusermount3" >> /etc/sudoers

# Set default working directory
WORKDIR /home/agent/work

# Environment for development
ENV HOME=/home/agent
ENV USER=agent
ENV SHELL=/bin/bash

# Expose standard ports for development services
EXPOSE 8080 8000 3000 5000

# Set up entrypoint that initializes HCFS workspace
ENTRYPOINT ["/opt/hcfs/entrypoint.sh"]
CMD ["/bin/bash"]
137
docker/hcfs-base/config/hcfs-agent.yaml
Normal file
137
docker/hcfs-base/config/hcfs-agent.yaml
Normal file
@@ -0,0 +1,137 @@
|
||||
# HCFS Agent Configuration
# This configuration is used by agents running in HCFS-enabled containers
#
# NOTE(review): the section nesting below was reconstructed from a
# whitespace-mangled source; verify `limits`/`tools` placement against the
# consumer of this file.

hcfs:
  # HCFS API Configuration
  api:
    url: "http://host.docker.internal:8000"
    timeout: 30s
    retry_count: 3

  # Workspace Configuration
  workspace:
    root: "/home/agent/work"
    mount_point: "/mnt/hcfs"
    auto_sync: true
    sync_interval: 30s

  # Artifact Collection
  artifacts:
    enabled: true
    # Glob patterns of workspace files to collect as artifacts
    patterns:
      - "*.log"
      - "*.md"
      - "*.txt"
      - "*.json"
      - "*.yaml"
      - "output/*"
      - "build/*.json"
      - "results/*"
    max_size: "10MB"
    compress: false

  # Cleanup Configuration
  cleanup:
    idle_timeout: "1h"
    auto_cleanup: true
    preserve_artifacts: true

# Agent Capabilities
agent:
  capabilities:
    - "file_operations"
    - "command_execution"
    - "artifact_collection"
    - "context_sharing"
    - "workspace_management"

  # Resource Limits
  limits:
    max_memory: "2GB"
    max_cpu: "2.0"
    max_disk: "10GB"
    max_files: 10000

# Development Tools
tools:
  python:
    enabled: true
    version: "3.10"
    venv: true
    packages:
      - "requests"
      - "pyyaml"
      - "click"
      - "rich"

  git:
    enabled: true
    auto_config: true

  make:
    enabled: true

  docker:
    enabled: false  # Disabled by default for security

# Security Configuration
security:
  user: "agent"
  home: "/home/agent"
  shell: "/bin/bash"

  # Network restrictions
  network:
    allow_outbound: true
    blocked_ports:
      - 22    # SSH
      - 3389  # RDP
      - 5432  # PostgreSQL
      - 3306  # MySQL

  # File system restrictions
  filesystem:
    read_only_paths:
      - "/etc"
      - "/usr"
      - "/boot"
    writable_paths:
      - "/home/agent"
      - "/tmp"
      - "/workspace"
      - "/mnt/hcfs"

# Logging Configuration
logging:
  level: "info"
  format: "json"
  destinations:
    - "/var/log/hcfs/agent.log"
    - "stdout"

  # Log categories (per-category level overrides)
  categories:
    workspace: "debug"
    artifacts: "info"
    hcfs_api: "info"
    security: "warn"

# Environment Variables
environment:
  PYTHONPATH: "/usr/local/lib/python3.10/site-packages"
  PATH: "/home/agent/.local/bin:/usr/local/bin:/usr/bin:/bin"
  TERM: "xterm-256color"
  EDITOR: "vim"

# Container Metadata
metadata:
  version: "1.0.0"
  created_by: "bzzz-hcfs-integration"
  description: "HCFS-enabled agent container for distributed AI development"

  # Tags for categorization
  tags:
    - "ai-agent"
    - "hcfs-enabled"
    - "development"
    - "sandboxed"
197
docker/hcfs-base/scripts/entrypoint.sh
Normal file
197
docker/hcfs-base/scripts/entrypoint.sh
Normal file
@@ -0,0 +1,197 @@
|
||||
#!/bin/bash
set -euo pipefail

# HCFS Agent Container Entrypoint
echo "🚀 Starting HCFS-enabled agent container..."

# Environment validation — defaults allow running the container standalone
# without orchestrator-provided IDs.
AGENT_ID="${AGENT_ID:-agent-$(hostname)}"
TASK_ID="${TASK_ID:-task-$(date +%s)}"
HCFS_API_URL="${HCFS_API_URL:-http://host.docker.internal:8000}"
HCFS_ENABLED="${HCFS_ENABLED:-true}"

echo "📋 Container Configuration:"
echo " Agent ID: $AGENT_ID"
echo " Task ID: $TASK_ID"
echo " HCFS API: $HCFS_API_URL"
echo " HCFS Enabled: $HCFS_ENABLED"
# Poll the HCFS API health endpoint until it responds or we run out of
# attempts. Returns 0 on success, 1 after 30 failed attempts (~60s).
wait_for_hcfs() {
    local tries=30
    local i

    echo "⏳ Waiting for HCFS API to be available..."

    for (( i = 1; i <= tries; i++ )); do
        if curl -s "$HCFS_API_URL/health" > /dev/null 2>&1; then
            echo "✅ HCFS API is available"
            return 0
        fi
        echo " Attempt $i/$tries - HCFS API not ready"
        sleep 2
    done

    echo "❌ HCFS API failed to become available after $tries attempts"
    return 1
}
|
||||
# Register a new workspace context for this container in HCFS.
# On success: writes the HCFS context path to /tmp/hcfs-workspace-path and
# returns 0. On failure: returns 1 so the caller can fall back to local
# storage.
init_hcfs_workspace() {
    echo "🔧 Initializing HCFS workspace..."

    # Unique per-container workspace path, keyed by agent ID and timestamp
    local workspace_path="/agents/$AGENT_ID/workspaces/$(date +%s)"
    local context_data
    context_data=$(cat <<EOF
{
    "path": "$workspace_path",
    "content": "Agent workspace for container $(hostname)",
    "summary": "Agent $AGENT_ID workspace - Task $TASK_ID",
    "metadata": {
        "agent_id": "$AGENT_ID",
        "task_id": "$TASK_ID",
        "container_id": "$(hostname)",
        "created_at": "$(date -Iseconds)",
        "workspace_type": "agent_container"
    }
}
EOF
)

    # Create context via HCFS API.
    # Declared and assigned separately: `local var=$(cmd)` would mask curl's
    # exit status (the `local` builtin's status wins — ShellCheck SC2155),
    # which matters under `set -euo pipefail`.
    local response
    response=$(curl -s -X POST \
        -H "Content-Type: application/json" \
        -d "$context_data" \
        "$HCFS_API_URL/contexts") || response=""

    # NOTE(review): a non-empty body is treated as success; an HTTP error
    # payload would also pass this check — consider checking curl's status
    # code (-w '%{http_code}') instead.
    if [ -n "$response" ]; then
        echo "✅ HCFS workspace context created: $workspace_path"
        echo "$workspace_path" > /tmp/hcfs-workspace-path
        return 0
    else
        echo "⚠️ Failed to create HCFS workspace context, using local storage"
        return 1
    fi
}
|
||||
# Prepare the local mount point and workspace layout for an HCFS context,
# and record the workspace metadata consumed by hcfs-workspace.sh.
# $1 — HCFS context path returned by init_hcfs_workspace.
mount_hcfs() {
    local hcfs_path="$1"

    echo "🔗 Mounting HCFS workspace: $hcfs_path"

    # For now, create a symbolic structure since we don't have full FUSE implementation
    # In production, this would be: fusermount3 -o allow_other "$hcfs_path" /mnt/hcfs
    mkdir -p /mnt/hcfs /home/agent/work/{src,build,output,logs}

    # Persist workspace metadata for the sync/finalize tooling
    cat > /home/agent/work/.hcfs-workspace << EOF
HCFS_WORKSPACE_PATH=$hcfs_path
HCFS_API_URL=$HCFS_API_URL
AGENT_ID=$AGENT_ID
TASK_ID=$TASK_ID
CREATED_AT=$(date -Iseconds)
EOF

    # Hand ownership of the workspace to the sandboxed agent user
    chown -R agent:agent /home/agent/work /mnt/hcfs

    echo "✅ HCFS workspace mounted and configured"
}
|
||||
# Create the agent user's standard directory layout, apply optional git
# identity (GIT_USER_NAME / GIT_USER_EMAIL), and optionally create a Python
# venv (SETUP_PYTHON_VENV, default true).
setup_dev_environment() {
    echo "🛠️ Setting up development environment..."

    # Standard development directories, owned by the agent user
    sudo -u agent mkdir -p /home/agent/{.local/bin,.config,.cache,work/{src,tests,docs,scripts}}

    # Git identity is only applied when both halves are provided
    if [[ -n "${GIT_USER_NAME:-}" && -n "${GIT_USER_EMAIL:-}" ]]; then
        sudo -u agent git config --global user.name "$GIT_USER_NAME"
        sudo -u agent git config --global user.email "$GIT_USER_EMAIL"
        echo "✅ Git configuration set: $GIT_USER_NAME <$GIT_USER_EMAIL>"
    fi

    # Python virtual environment (opt-out via SETUP_PYTHON_VENV=false)
    if [[ "${SETUP_PYTHON_VENV:-true}" = "true" ]]; then
        sudo -u agent python3 -m venv /home/agent/.venv
        echo "✅ Python virtual environment created"
    fi

    echo "✅ Development environment ready"
}
|
||||
# Launch background daemons. Currently only the HCFS workspace sync daemon,
# and only when HCFS is enabled and a workspace was registered
# (/tmp/hcfs-workspace-path written by init_hcfs_workspace).
start_background_services() {
    echo "🔄 Starting background services..."

    if [ "$HCFS_ENABLED" != "true" ] || [ ! -f /tmp/hcfs-workspace-path ]; then
        return 0
    fi

    /opt/hcfs/hcfs-workspace.sh daemon &
    echo "✅ HCFS workspace sync daemon started"
}
|
||||
# Exit handler (installed via trap): pushes the final workspace state to
# HCFS when HCFS is enabled and a workspace was registered.
cleanup() {
    echo "🧹 Container cleanup initiated..."

    if [[ "$HCFS_ENABLED" = "true" && -f /tmp/hcfs-workspace-path ]]; then
        echo "💾 Storing final workspace state to HCFS..."
        /opt/hcfs/hcfs-workspace.sh finalize
    fi

    echo "✅ Cleanup completed"
}
|
||||
# Set up signal handlers for graceful shutdown.
# NOTE(review): cleanup may run twice on SIGINT/SIGTERM (signal trap, then
# EXIT trap) — it is idempotent in practice, but confirm `finalize` tolerates
# being invoked twice.
trap cleanup EXIT INT TERM

# Main initialization sequence: wait for HCFS, register/mount a workspace
# (falling back to local storage on any failure), prepare the dev
# environment, start daemons, then exec the requested command (or an
# interactive shell) as the unprivileged agent user.
main() {
    echo "🏁 Starting HCFS Agent Container initialization..."

    # Wait for HCFS if enabled
    if [ "$HCFS_ENABLED" = "true" ]; then
        if wait_for_hcfs; then
            if init_hcfs_workspace; then
                # Declared and assigned separately so that `local` does not
                # mask a failure of the command substitution (SC2155) under
                # `set -euo pipefail`.
                local workspace_path
                workspace_path=$(cat /tmp/hcfs-workspace-path)
                mount_hcfs "$workspace_path"
            else
                echo "⚠️ HCFS workspace initialization failed, continuing with local storage"
            fi
        else
            echo "⚠️ HCFS API unavailable, continuing with local storage"
        fi
    else
        echo "ℹ️ HCFS disabled, using local storage only"
    fi

    # Set up development environment
    setup_dev_environment

    # Start background services
    start_background_services

    echo "🎉 HCFS Agent Container initialization complete!"
    echo "📁 Workspace: /home/agent/work"
    echo "🔧 Agent: $AGENT_ID"
    echo "📋 Task: $TASK_ID"

    # Execute the provided command or start interactive shell, dropping
    # privileges to the agent user either way.
    if [ $# -eq 0 ]; then
        echo "🔧 Starting interactive shell..."
        exec sudo -u agent -i /bin/bash
    else
        echo "🚀 Executing command: $*"
        exec sudo -u agent "$@"
    fi
}

# Execute main function
main "$@"
242
docker/hcfs-base/scripts/hcfs-workspace.sh
Normal file
242
docker/hcfs-base/scripts/hcfs-workspace.sh
Normal file
@@ -0,0 +1,242 @@
|
||||
#!/bin/bash
set -euo pipefail

# HCFS Workspace Management Script
# Handles workspace synchronization and artifact collection

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"  # currently unused in this script
WORKSPACE_DIR="/home/agent/work"
HCFS_CONFIG="/home/agent/work/.hcfs-workspace"

# Load workspace configuration (written by the container entrypoint's
# mount step). Provides HCFS_WORKSPACE_PATH, HCFS_API_URL, AGENT_ID,
# TASK_ID and CREATED_AT; without it this script cannot do anything useful.
if [ -f "$HCFS_CONFIG" ]; then
    source "$HCFS_CONFIG"
else
    echo "⚠️ No HCFS workspace configuration found"
    exit 1
fi

# Logging function: timestamped line to stdout and the workspace log file.
log() {
    echo "[$(date +'%Y-%m-%d %H:%M:%S')] $1" | tee -a /var/log/hcfs/workspace.log
}
|
||||
# Store a single artifact in HCFS as a context object.
# $1 — workspace-relative file path (recorded in metadata.file_path)
# $2 — artifact name (used in the HCFS context path and summary)
# $3 — artifact content (base64-encoded by the caller)
# Returns 0 if the API returned any response body, 1 otherwise.
store_artifact() {
    local artifact_path="$1"
    local artifact_name="$2"
    local content="$3"

    local hcfs_artifact_path="${HCFS_WORKSPACE_PATH}/artifacts/${artifact_name}"

    # NOTE(review): values are interpolated into JSON unescaped. Content is
    # base64 (safe), but an artifact name containing `"` would produce
    # invalid JSON — consider building this payload with jq.
    local artifact_data
    artifact_data=$(cat <<EOF
{
    "path": "$hcfs_artifact_path",
    "content": "$content",
    "summary": "Artifact: $artifact_name",
    "metadata": {
        "agent_id": "$AGENT_ID",
        "task_id": "$TASK_ID",
        "artifact_name": "$artifact_name",
        "artifact_type": "workspace_output",
        "file_path": "$artifact_path",
        "created_at": "$(date -Iseconds)"
    }
}
EOF
)

    # Declared and assigned separately so `local` does not mask curl's exit
    # status (ShellCheck SC2155) under `set -euo pipefail`.
    local response
    response=$(curl -s -X POST \
        -H "Content-Type: application/json" \
        -d "$artifact_data" \
        "$HCFS_API_URL/contexts") || response=""

    if [ -n "$response" ]; then
        log "✅ Stored artifact: $artifact_name -> $hcfs_artifact_path"
        return 0
    else
        log "❌ Failed to store artifact: $artifact_name"
        return 1
    fi
}
|
||||
# Walk the workspace for files matching the artifact patterns and store each
# one in HCFS (base64-encoded, capped at ~1MB of encoded content).
#
# BUGFIX: patterns containing a slash (e.g. "output/*", "./**/README*") can
# never match with `find -name`, which tests the basename only. Slash
# patterns are now dispatched to `-path`; "./**/X" patterns (X at any depth)
# are reduced to `-name "X"`.
collect_artifacts() {
    log "📦 Collecting workspace artifacts..."

    local artifact_count=0

    # Common artifact patterns
    local artifact_patterns=(
        "*.log"
        "*.md"
        "*.txt"
        "*.json"
        "*.yaml"
        "*.yml"
        "output/*"
        "build/*.json"
        "build/*.xml"
        "results/*"
        "./**/README*"
        "./**/CHANGELOG*"
        "./**/requirements*.txt"
        "./**/package*.json"
        "./**/Cargo.toml"
        "./**/go.mod"
        "./**/pom.xml"
    )

    local pattern find_args
    for pattern in "${artifact_patterns[@]}"; do
        # Choose the right find predicate for the pattern shape.
        case "$pattern" in
            ./**/*) find_args=(-name "${pattern##*/}") ;;          # match at any depth
            */*)    find_args=(-path "$WORKSPACE_DIR/$pattern") ;; # anchored subdir glob
            *)      find_args=(-name "$pattern") ;;                # plain basename glob
        esac

        while IFS= read -r -d '' file; do
            # Only non-empty regular files are worth storing
            if [ -f "$file" ] && [ -s "$file" ]; then
                local relative_path="${file#$WORKSPACE_DIR/}"
                local content
                content=$(base64 -w 0 "$file" 2>/dev/null || echo "")

                if [ -n "$content" ] && [ ${#content} -lt 1000000 ]; then # Limit to 1MB
                    if store_artifact "$relative_path" "$relative_path" "$content"; then
                        artifact_count=$((artifact_count + 1))
                    fi
                fi
            fi
        done < <(find "$WORKSPACE_DIR" "${find_args[@]}" -type f -print0 2>/dev/null || true)
    done

    log "✅ Collected $artifact_count artifacts"
}
|
||||
# Push a status update for this workspace to HCFS (best-effort: the curl
# failure is swallowed so status reporting never aborts the caller).
# $1 — short status label (e.g. "active", "completed")
# $2 — free-form status message stored as the context content
update_workspace_status() {
    local status="$1"
    local message="$2"

    # NOTE(review): $message is interpolated into JSON unescaped; a message
    # containing `"` or newlines produces invalid JSON — confirm callers only
    # pass safe text, or build the payload with jq.
    local status_data=$(cat <<EOF
{
    "path": "${HCFS_WORKSPACE_PATH}/status",
    "content": "$message",
    "summary": "Workspace status: $status",
    "metadata": {
        "agent_id": "$AGENT_ID",
        "task_id": "$TASK_ID",
        "status": "$status",
        "timestamp": "$(date -Iseconds)",
        "hostname": "$(hostname)",
        "workspace_dir": "$WORKSPACE_DIR"
    }
}
EOF
)

    # Best-effort POST: `|| true` keeps set -e from killing the script when
    # the HCFS API is unreachable.
    curl -s -X POST \
        -H "Content-Type: application/json" \
        -d "$status_data" \
        "$HCFS_API_URL/contexts" > /dev/null || true

    log "📊 Updated workspace status: $status"
}
|
||||
# Build a human-readable snapshot of the workspace (file/dir counts, size,
# recent listing) and push it to HCFS as an "active" status update.
sync_workspace() {
    log "🔄 Syncing workspace changes..."

    # Create workspace summary; each stat falls back gracefully if the
    # workspace is missing or unreadable.
    local file_count=$(find "$WORKSPACE_DIR" -type f 2>/dev/null | wc -l)
    local dir_count=$(find "$WORKSPACE_DIR" -type d 2>/dev/null | wc -l)
    local total_size=$(du -sb "$WORKSPACE_DIR" 2>/dev/null | cut -f1 || echo "0")

    local summary=$(cat <<EOF
Workspace Summary ($(date -Iseconds)):
- Files: $file_count
- Directories: $dir_count
- Total Size: $total_size bytes
- Agent: $AGENT_ID
- Task: $TASK_ID
- Container: $(hostname)

Recent Activity:
$(ls -la "$WORKSPACE_DIR" 2>/dev/null | head -10 || echo "No files")
EOF
)

    update_workspace_status "active" "$summary"
}
|
||||
# Final shutdown step (invoked from the container's cleanup trap): collect
# all artifacts, then push a "completed" status with a completion summary.
finalize_workspace() {
    log "🏁 Finalizing workspace..."

    # Collect all artifacts
    collect_artifacts

    # Create final summary. Duration falls back to the raw epoch delta if
    # CREATED_AT cannot be parsed (date -d ... || echo "0").
    local completion_summary=$(cat <<EOF
Workspace Completion Summary:
- Agent ID: $AGENT_ID
- Task ID: $TASK_ID
- Container: $(hostname)
- Started: $CREATED_AT
- Completed: $(date -Iseconds)
- Duration: $(($(date +%s) - $(date -d "$CREATED_AT" +%s 2>/dev/null || echo "0"))) seconds

Final Workspace Contents:
$(find "$WORKSPACE_DIR" -type f 2>/dev/null | head -20 || echo "No files")

Artifacts Collected:
$(ls "$WORKSPACE_DIR"/{output,build,logs,results}/* 2>/dev/null | head -10 || echo "No artifacts")
EOF
)

    update_workspace_status "completed" "$completion_summary"
    log "✅ Workspace finalized"
}
|
||||
# Run forever, pushing a workspace sync to HCFS roughly every 30 seconds.
# The 5-second inner sleep keeps the loop responsive to signals while the
# epoch comparison enforces the actual sync interval.
daemon_mode() {
    log "🔄 Starting HCFS workspace sync daemon..."

    local interval=30 # seconds between syncs
    local last=0
    local now

    while :; do
        now=$(date +%s)

        if (( now - last >= interval )); then
            sync_workspace
            last=$now
        fi

        sleep 5
    done
}
|
||||
# Main command dispatcher — first CLI argument selects the action; anything
# unrecognized (or no argument) prints usage.
case "${1:-help}" in
    "sync")
        sync_workspace
        ;;
    "collect")
        collect_artifacts
        ;;
    "finalize")
        finalize_workspace
        ;;
    "daemon")
        daemon_mode
        ;;
    "status")
        update_workspace_status "active" "Status check at $(date -Iseconds)"
        ;;
    "help"|*)
        echo "HCFS Workspace Management Script"
        echo ""
        echo "Usage: $0 {sync|collect|finalize|daemon|status|help}"
        echo ""
        echo "Commands:"
        echo " sync - Sync current workspace state to HCFS"
        echo " collect - Collect and store artifacts in HCFS"
        echo " finalize - Finalize workspace and store all artifacts"
        echo " daemon - Run continuous sync daemon"
        echo " status - Update workspace status in HCFS"
        echo " help - Show this help message"
        ;;
esac
Reference in New Issue
Block a user