Add comprehensive database rebuild capability with complete unified schema
Features: - Complete SQL schema file (000_complete_schema.sql) for full database rebuild - Unified authentication system with UUID-based users, API keys, refresh tokens - All platform tables: users, agents, workflows, tasks, executions, metrics, alerts - Comprehensive indexing strategy for performance optimization - Automated rebuild scripts (Python and Shell) with Docker integration - Detailed documentation with usage instructions and troubleshooting Schema capabilities: ✅ UUID-based design for scalability and consistency ✅ Complete authentication: JWT, API keys, password hashing, token blacklisting ✅ Agent management: Ollama and CLI agents with performance metrics ✅ Workflow orchestration: n8n integration with execution tracking ✅ Task management: Priority-based assignment and status tracking ✅ Monitoring: System alerts, performance metrics, health checks ✅ Default users: admin and developer accounts for immediate access This provides a single-command database rebuild capability that creates the complete Hive platform schema from scratch, resolving all previous schema conflicts and providing a clean foundation for authentication and full platform functionality. 🤖 Generated with [Claude Code](https://claude.ai/code) Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
113
backend/scripts/rebuild_database.py
Executable file
113
backend/scripts/rebuild_database.py
Executable file
@@ -0,0 +1,113 @@
|
||||
#!/usr/bin/env python3
"""
Database rebuild script for Hive platform.
Completely rebuilds the database schema from scratch using the unified schema.
"""

import os
import sys
import logging
import psycopg2
from pathlib import Path

# Configure logging
# Script-friendly format: timestamp + level + message, INFO and above.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(levelname)s - %(message)s"
)
# Module-level logger shared by all functions in this script.
logger = logging.getLogger(__name__)
|
||||
|
||||
def get_database_config():
    """Build the psycopg2 connection settings from environment variables.

    Every setting falls back to a development default when its DB_* variable
    is unset; the result maps directly onto ``psycopg2.connect(**config)``.
    """
    # (connection key) -> (environment variable, development default)
    env_map = {
        'host': ('DB_HOST', 'localhost'),
        'port': ('DB_PORT', '5432'),
        'database': ('DB_NAME', 'hive'),
        'user': ('DB_USER', 'postgres'),
        'password': ('DB_PASSWORD', 'hive123'),
    }
    return {key: os.getenv(var, default) for key, (var, default) in env_map.items()}
|
||||
|
||||
def execute_sql_file(connection, sql_file_path):
    """Execute the contents of an SQL file in a single transaction.

    Args:
        connection: An open DB-API connection (e.g. psycopg2) supporting
            cursor() as a context manager, commit() and rollback().
        sql_file_path: Path (str or pathlib.Path) of the SQL file to run.

    Returns:
        True on success; False if reading or executing the file failed,
        in which case the transaction is rolled back.
    """
    try:
        # Explicit UTF-8 so the schema file decodes identically regardless of
        # the host locale (the seed data contains non-ASCII characters, which
        # would break under a non-UTF-8 default encoding).
        with open(sql_file_path, 'r', encoding='utf-8') as file:
            sql_content = file.read()

        # Run the whole file as one statement batch; committed atomically.
        with connection.cursor() as cursor:
            cursor.execute(sql_content)

        connection.commit()
        logger.info(f"Successfully executed {sql_file_path}")
        return True

    except Exception as e:
        # Broad catch is deliberate: any failure (I/O or SQL) must roll back
        # and report False so the caller can abort with a clean exit code.
        logger.error(f"Failed to execute {sql_file_path}: {e}")
        connection.rollback()
        return False
|
||||
|
||||
def main():
    """Rebuild the Hive database end to end.

    Steps: connect using env-derived settings, execute the unified schema
    file, then run sanity queries (public table count, seeded user count).
    Exits the process with status 1 on any failure; the connection is
    always closed before exiting.
    """
    logger.info("🔄 Starting Hive database rebuild...")

    # Get database configuration
    db_config = get_database_config()
    logger.info(f"Connecting to database: {db_config['host']}:{db_config['port']}/{db_config['database']}")

    # Connect to database (failure here is fatal; nothing to clean up yet)
    try:
        connection = psycopg2.connect(**db_config)
        logger.info("✅ Connected to database successfully")
    except Exception as e:
        logger.error(f"❌ Failed to connect to database: {e}")
        sys.exit(1)

    try:
        # Schema file lives at backend/migrations/, one level above scripts/
        schema_file = Path(__file__).parent.parent / "migrations" / "000_complete_schema.sql"

        if not schema_file.exists():
            logger.error(f"❌ Schema file not found: {schema_file}")
            sys.exit(1)

        logger.info(f"📄 Using schema file: {schema_file}")

        # Execute the complete schema
        logger.info("🏗️ Rebuilding database schema...")
        if execute_sql_file(connection, schema_file):
            logger.info("✅ Database schema rebuilt successfully!")

            # Verify the rebuild: the unified schema is expected to create
            # the public tables and seed at least two default users.
            with connection.cursor() as cursor:
                cursor.execute("SELECT COUNT(*) FROM users;")
                user_count = cursor.fetchone()[0]

                cursor.execute("SELECT COUNT(*) FROM pg_tables WHERE schemaname = 'public';")
                table_count = cursor.fetchone()[0]

                logger.info(f"📊 Database verification:")
                logger.info(f"   - Tables created: {table_count}")
                logger.info(f"   - Initial users: {user_count}")

                if user_count >= 2:
                    logger.info("🔐 Default users created successfully")
                    logger.warning("⚠️ SECURITY: Change default passwords in production!")
                else:
                    logger.warning("⚠️ Warning: Expected at least 2 initial users")

        else:
            logger.error("❌ Failed to rebuild database schema")
            sys.exit(1)

    except Exception as e:
        # Also covers failures of the verification queries above.
        logger.error(f"❌ Unexpected error during rebuild: {e}")
        sys.exit(1)

    finally:
        # Runs on both the success path and the sys.exit paths above.
        connection.close()
        logger.info("🔌 Database connection closed")

    # Only reached on full success: sys.exit raises through the finally block.
    logger.info("🎉 Hive database rebuild completed successfully!")
    logger.info("🚀 Ready for authentication and full platform functionality")
|
||||
|
||||
# Script entry point: rebuild the database when run directly.
if __name__ == "__main__":
    main()
|
||||
153
backend/scripts/rebuild_database.sh
Executable file
153
backend/scripts/rebuild_database.sh
Executable file
@@ -0,0 +1,153 @@
|
||||
#!/bin/bash
# Hive Database Rebuild Script
# Completely rebuilds the Hive database schema using Docker and the complete schema file

set -e  # abort on any unhandled command failure

echo "🔄 Starting Hive database rebuild..."

# Configuration — each value can be overridden via the matching DB_* env var
POSTGRES_HOST=${DB_HOST:-"hive_postgres"}
POSTGRES_DB=${DB_NAME:-"hive"}
POSTGRES_USER=${DB_USER:-"postgres"}
POSTGRES_PASSWORD=${DB_PASSWORD:-"hive123"}
POSTGRES_PORT=${DB_PORT:-"5432"}

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[0;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Colored logging helpers
echo_info() { echo -e "${BLUE}$1${NC}"; }
echo_success() { echo -e "${GREEN}$1${NC}"; }
echo_warning() { echo -e "${YELLOW}$1${NC}"; }
echo_error() { echo -e "${RED}$1${NC}"; }

# Check if Docker is available
if ! command -v docker &> /dev/null; then
    echo_error "❌ Docker is not available"
    exit 1
fi

# Check if we're in the right directory (must be run from backend/)
if [[ ! -f "./migrations/000_complete_schema.sql" ]]; then
    echo_error "❌ Complete schema file not found. Please run from backend directory."
    exit 1
fi

echo_info "📄 Using complete schema: ./migrations/000_complete_schema.sql"

# Check if PostgreSQL container is running
if ! docker service ls | grep -q hive_postgres; then
    echo_warning "⚠️ PostgreSQL service not found in Docker swarm"
    echo_info "🚀 Starting PostgreSQL service..."

    # Try to find a PostgreSQL container to use
    if docker ps | grep -q postgres; then
        echo_info "📦 Found running PostgreSQL container"
    else
        echo_error "❌ No PostgreSQL container available. Please start the Hive stack first."
        echo_info "Run: docker stack deploy -c docker-compose.swarm.yml hive"
        exit 1
    fi
fi
|
||||
|
||||
# Run one SQL file against the Hive database via a throwaway psql container.
execute_sql() {
    local file_to_run="$1"
    echo_info "🏗️ Executing SQL file: $file_to_run"

    # Mount the working directory into the container so psql can read the
    # file; join the stack network so the service hostname resolves.
    docker run --rm \
        --network hive_default \
        -e PGPASSWORD="$POSTGRES_PASSWORD" \
        -v "$(pwd):/workspace" \
        postgres:15-alpine \
        psql -h "$POSTGRES_HOST" -U "$POSTGRES_USER" -d "$POSTGRES_DB" -f "/workspace/$file_to_run"
}
|
||||
|
||||
# Function to test database connection.
# Returns 0 when a trivial query succeeds, 1 otherwise.
test_connection() {
    echo_info "🔌 Testing database connection..."

    # Test the command directly in the `if` condition. The original ran the
    # command bare and then checked `$?` on the next line — under `set -e` a
    # failing bare command can abort the whole script before the check runs.
    if docker run --rm \
        --network hive_default \
        -e PGPASSWORD="$POSTGRES_PASSWORD" \
        postgres:15-alpine \
        psql -h "$POSTGRES_HOST" -U "$POSTGRES_USER" -d "$POSTGRES_DB" -c "SELECT version();" > /dev/null 2>&1; then
        echo_success "✅ Database connection successful"
        return 0
    else
        echo_error "❌ Database connection failed"
        return 1
    fi
}
|
||||
|
||||
# Function to verify rebuild.
# Queries table and seeded-user counts in one round-trip and checks them
# against expected minimums. Returns 0 on pass, 1 on fail.
verify_rebuild() {
    echo_info "📊 Verifying database rebuild..."

    # psql -t (tuples only) suppresses headers/footers, so the output is a
    # single row shaped like "  14 |   2".
    local result=$(docker run --rm \
        --network hive_default \
        -e PGPASSWORD="$POSTGRES_PASSWORD" \
        postgres:15-alpine \
        psql -h "$POSTGRES_HOST" -U "$POSTGRES_USER" -d "$POSTGRES_DB" -t -c "
        SELECT
            (SELECT COUNT(*) FROM pg_tables WHERE schemaname = 'public') as tables,
            (SELECT COUNT(*) FROM users) as users;
        ")

    # awk fields of that row: $1 = table count, $2 = the '|' separator,
    # $3 = user count.
    local tables=$(echo "$result" | awk '{print $1}')
    local users=$(echo "$result" | awk '{print $3}')

    echo_info "   - Tables created: $tables"
    echo_info "   - Initial users: $users"

    # Thresholds: the complete schema is expected to create more than 10
    # public tables and seed at least the two default users.
    if [[ $tables -gt 10 ]] && [[ $users -ge 2 ]]; then
        echo_success "✅ Database rebuild verification passed"
        echo_warning "⚠️ SECURITY: Change default passwords in production!"
        return 0
    else
        echo_error "❌ Database rebuild verification failed"
        return 1
    fi
}
|
||||
|
||||
# Main execution: connection check -> schema rebuild -> verification.
# Exits 1 at the first failed stage.
main() {
    # Test connection first
    if ! test_connection; then
        echo_error "❌ Cannot proceed without database connection"
        exit 1
    fi

    # Execute the complete schema rebuild
    echo_info "🏗️ Rebuilding database schema..."

    if execute_sql "migrations/000_complete_schema.sql"; then
        echo_success "✅ Database schema rebuilt successfully!"

        # Verify the rebuild
        if verify_rebuild; then
            echo_success "🎉 Hive database rebuild completed successfully!"
            echo_info "🚀 Ready for authentication and full platform functionality"
            echo_info ""
            # Seeded by the schema file; must be rotated outside development.
            echo_info "Default credentials:"
            echo_info "   Admin: admin@hive.local / admin123"
            echo_info "   Developer: developer@hive.local / dev123"
            echo_warning "⚠️ CHANGE THESE PASSWORDS IN PRODUCTION!"
        else
            exit 1
        fi
    else
        echo_error "❌ Failed to rebuild database schema"
        exit 1
    fi
}

# Run main function
main "$@"
|
||||
Reference in New Issue
Block a user