#!/bin/bash
set -euo pipefail

# HCFS Docker Images Build Script
# Builds all HCFS-enabled development environment containers

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Configuration
REGISTRY="${DOCKER_REGISTRY:-registry.home.deepblack.cloud}"
NAMESPACE="${DOCKER_NAMESPACE:-tony}"
VERSION="${VERSION:-latest}"
BUILD_PARALLEL="${BUILD_PARALLEL:-false}"

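# Each configuration value above can be overridden from the environment at
# invocation time. Example (registry, namespace, version, and the script
# filename below are illustrative, not requirements of this script):
#   DOCKER_REGISTRY=registry.example.com DOCKER_NAMESPACE=ci VERSION=1.2.0 ./build-images.sh build
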
# Logging functions
log() {
    echo -e "${BLUE}[$(date +'%Y-%m-%d %H:%M:%S')]${NC} $1"
}

success() {
    echo -e "${GREEN}[SUCCESS]${NC} $1"
}

warning() {
    echo -e "${YELLOW}[WARNING]${NC} $1"
}

error() {
    echo -e "${RED}[ERROR]${NC} $1" >&2
}

# Function to build a single image
build_image() {
    local image_name="$1"
    local dockerfile_dir="$2"
    local build_args="$3"

    log "Building image: $image_name"

    local full_image_name="$REGISTRY/$NAMESPACE/$image_name:$VERSION"
    local build_cmd="docker build"

    # Add build arguments if provided
    if [ -n "$build_args" ]; then
        build_cmd="$build_cmd $build_args"
    fi

    # Add tags
    build_cmd="$build_cmd -t $image_name:$VERSION -t $image_name:latest"
    build_cmd="$build_cmd -t $full_image_name"

    # Add dockerfile directory
    build_cmd="$build_cmd $dockerfile_dir"

    if eval "$build_cmd"; then
        success "Built image: $image_name"
        return 0
    else
        error "Failed to build image: $image_name"
        return 1
    fi
}

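# For illustration: with the default configuration and no extra build args,
# the command assembled above for the base image is roughly
#   docker build -t bzzz-hcfs-base:latest -t bzzz-hcfs-base:latest \
#       -t registry.home.deepblack.cloud/tony/bzzz-hcfs-base:latest "$SCRIPT_DIR/hcfs-base"
# (with VERSION=latest the two local tags coincide; with a pinned version they differ).
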
# Function to prepare HCFS SDK files
prepare_hcfs_sdks() {
    log "Preparing HCFS SDK files..."

    local sdk_dir="$SCRIPT_DIR/sdks"
    mkdir -p "$sdk_dir"

    # Copy Python SDK
    if [ -d "$PROJECT_ROOT/../HCFS/hcfs-python" ]; then
        cp -r "$PROJECT_ROOT/../HCFS/hcfs-python" "$sdk_dir/hcfs-python-sdk"
        success "Copied Python HCFS SDK"
    else
        warning "Python HCFS SDK not found, creating minimal version"
        mkdir -p "$sdk_dir/hcfs-python-sdk"
        cat > "$sdk_dir/hcfs-python-sdk/setup.py" << 'EOF'
from setuptools import setup, find_packages

setup(
    name="hcfs-sdk",
    version="1.0.0",
    packages=find_packages(),
    install_requires=["httpx", "pydantic"],
)
EOF
        mkdir -p "$sdk_dir/hcfs-python-sdk/hcfs"
        echo "# HCFS Python SDK Placeholder" > "$sdk_dir/hcfs-python-sdk/hcfs/__init__.py"
    fi

    # Create Node.js SDK
    mkdir -p "$sdk_dir/hcfs-nodejs-sdk"
    cat > "$sdk_dir/hcfs-nodejs-sdk/package.json" << 'EOF'
{
  "name": "@hcfs/sdk",
  "version": "1.0.0",
  "description": "HCFS Node.js SDK",
  "main": "index.js",
  "dependencies": {
    "axios": "^1.0.0"
  }
}
EOF
    echo "module.exports = { HCFSClient: class HCFSClient {} };" > "$sdk_dir/hcfs-nodejs-sdk/index.js"

    # Create Go SDK
    mkdir -p "$sdk_dir/hcfs-go-sdk"
    cat > "$sdk_dir/hcfs-go-sdk/go.mod" << 'EOF'
module github.com/hcfs/go-sdk

go 1.21

require (
    github.com/go-resty/resty/v2 v2.7.0
)
EOF
    cat > "$sdk_dir/hcfs-go-sdk/client.go" << 'EOF'
package client

import "github.com/go-resty/resty/v2"

type HCFSClient struct {
    client  *resty.Client
    baseURL string
}

func NewHCFSClient(baseURL string) (*HCFSClient, error) {
    return &HCFSClient{
        client:  resty.New(),
        baseURL: baseURL,
    }, nil
}
EOF

    success "HCFS SDKs prepared"
}

# Function to copy scripts
prepare_scripts() {
    log "Preparing build scripts..."

    # Copy scripts to each image directory
    for image_dir in "$SCRIPT_DIR"/hcfs-*; do
        if [ -d "$image_dir" ]; then
            mkdir -p "$image_dir/scripts"
            mkdir -p "$image_dir/config"
            mkdir -p "$image_dir/hcfs-client"

            # Copy common scripts
            cp "$SCRIPT_DIR/hcfs-base/scripts/"* "$image_dir/scripts/" 2>/dev/null || true
            cp "$SCRIPT_DIR/hcfs-base/config/"* "$image_dir/config/" 2>/dev/null || true

            # Copy HCFS client
            cp -r "$SCRIPT_DIR/sdks/hcfs-python-sdk/"* "$image_dir/hcfs-client/" 2>/dev/null || true
        fi
    done

    success "Scripts prepared"
}

# Function to validate prerequisites
validate_prerequisites() {
    log "Validating prerequisites..."

    # Check if Docker is available
    if ! command -v docker &> /dev/null; then
        error "Docker is not installed or not in PATH"
        exit 1
    fi

    # Check if Docker daemon is running
    if ! docker info &> /dev/null; then
        error "Docker daemon is not running"
        exit 1
    fi

    # Check if required directories exist
    if [ ! -d "$SCRIPT_DIR/hcfs-base" ]; then
        error "Base image directory not found: $SCRIPT_DIR/hcfs-base"
        exit 1
    fi

    success "Prerequisites validated"
}

# Function to build all images
build_all_images() {
    log "Building HCFS development environment images..."

    local images=(
        "bzzz-hcfs-base:$SCRIPT_DIR/hcfs-base:"
        "bzzz-hcfs-python:$SCRIPT_DIR/hcfs-python:"
        "bzzz-hcfs-nodejs:$SCRIPT_DIR/hcfs-nodejs:"
        "bzzz-hcfs-go:$SCRIPT_DIR/hcfs-go:"
    )

    local failed_builds=()

    if [ "$BUILD_PARALLEL" = "true" ]; then
        log "Building images in parallel..."
        local pids=()

        for image_spec in "${images[@]}"; do
            IFS=':' read -r image_name dockerfile_dir build_args <<< "$image_spec"
            (build_image "$image_name" "$dockerfile_dir" "$build_args") &
            pids+=($!)
        done

        # Wait for all builds to complete
        for pid in "${pids[@]}"; do
            if ! wait $pid; then
                failed_builds+=("PID:$pid")
            fi
        done
    else
        log "Building images sequentially..."

        for image_spec in "${images[@]}"; do
            IFS=':' read -r image_name dockerfile_dir build_args <<< "$image_spec"
            if ! build_image "$image_name" "$dockerfile_dir" "$build_args"; then
                failed_builds+=("$image_name")
            fi
        done
    fi

    # Report results
    if [ ${#failed_builds[@]} -eq 0 ]; then
        success "All images built successfully!"
    else
        error "Failed to build images: ${failed_builds[*]}"
        return 1
    fi
}

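# Each entry in the images array above has the form "image_name:dockerfile_dir:build_args"
# and is split on ':' by the IFS read. For illustration, assuming SCRIPT_DIR=/opt/bzzz/docker,
# the spec "bzzz-hcfs-base:/opt/bzzz/docker/hcfs-base:" parses as:
#   image_name     -> bzzz-hcfs-base
#   dockerfile_dir -> /opt/bzzz/docker/hcfs-base
#   build_args     -> (empty)
# Note this relies on dockerfile_dir itself containing no ':' characters.
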
# Function to push images to registry
push_images() {
    log "Pushing images to registry: $REGISTRY"

    local images=(
        "bzzz-hcfs-base"
        "bzzz-hcfs-python"
        "bzzz-hcfs-nodejs"
        "bzzz-hcfs-go"
    )

    for image in "${images[@]}"; do
        local full_name="$REGISTRY/$NAMESPACE/$image:$VERSION"

        log "Pushing $full_name..."
        if docker push "$full_name"; then
            success "Pushed $full_name"
        else
            warning "Failed to push $full_name"
        fi
    done
}

# Function to run tests
test_images() {
    log "Testing built images..."

    local images=(
        "bzzz-hcfs-base"
        "bzzz-hcfs-python"
        "bzzz-hcfs-nodejs"
        "bzzz-hcfs-go"
    )

    for image in "${images[@]}"; do
        log "Testing $image..."

        # Basic smoke test
        if docker run --rm "$image:$VERSION" /bin/echo "Image $image test successful"; then
            success "Test passed: $image"
        else
            warning "Test failed: $image"
        fi
    done
}

# Function to clean up
cleanup() {
    log "Cleaning up temporary files..."

    # Remove copied SDK files
    rm -rf "$SCRIPT_DIR/sdks"

    # Clean up dangling images
    docker image prune -f &> /dev/null || true

    success "Cleanup completed"
}

# Main execution
main() {
    local command="${1:-build}"

    case $command in
        "build")
            validate_prerequisites
            prepare_hcfs_sdks
            prepare_scripts
            build_all_images
            ;;
        "push")
            push_images
            ;;
        "test")
            test_images
            ;;
        "all")
            validate_prerequisites
            prepare_hcfs_sdks
            prepare_scripts
            build_all_images
            test_images
            push_images
            ;;
        "clean")
            cleanup
            ;;
        "help"|*)
            echo "HCFS Docker Images Build Script"
            echo ""
            echo "Usage: $0 {build|push|test|all|clean|help}"
            echo ""
            echo "Commands:"
            echo "  build  - Build all HCFS development images"
            echo "  push   - Push images to registry"
            echo "  test   - Run smoke tests on built images"
            echo "  all    - Build, test, and push images"
            echo "  clean  - Clean up temporary files"
            echo "  help   - Show this help message"
            echo ""
            echo "Environment Variables:"
            echo "  DOCKER_REGISTRY  - Docker registry URL (default: registry.home.deepblack.cloud)"
            echo "  DOCKER_NAMESPACE - Docker namespace (default: tony)"
            echo "  VERSION          - Image version tag (default: latest)"
            echo "  BUILD_PARALLEL   - Build images in parallel (default: false)"
            exit 0
            ;;
    esac
}

# Set up signal handlers for cleanup
trap cleanup EXIT INT TERM

# Execute main function
main "$@"