#!/bin/bash
#
# Paper Dynasty Database Sync - Production to Dev
#
# Syncs the production PostgreSQL database (akamai) to the dev environment (pd-database).
#
# Usage:
#   ./sync_prod_to_dev.sh [--dry-run] [--no-backup] [--yes]
#
# Options:
#   --dry-run     Show what would happen without making changes
#   --no-backup   Skip creating a backup of the dev database before restore
#   --yes, -y     Skip the confirmation prompt (for non-interactive use)
#

set -e

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
DUMP_FILE="/tmp/pd_prod_dump_${TIMESTAMP}.sql"
BACKUP_DIR="$HOME/.paper-dynasty/db-backups"

# Production database info
PROD_HOST="akamai"
PROD_CONTAINER="sba_postgres"
PROD_DB="pd_master"
PROD_USER="pd_admin"
PROD_PASSWORD="wJHZRZbO5NJBjhGfqydsZueV"  # not referenced below; psql/pg_dump run inside the container via docker exec

# Dev database info
DEV_HOST="pd-database"
DEV_CONTAINER="sba_postgres"
DEV_DB="paperdynasty_dev"
DEV_USER="sba_admin"

# Parse arguments
DRY_RUN=false
NO_BACKUP=false
SKIP_CONFIRM=false

while [[ $# -gt 0 ]]; do
    case $1 in
        --dry-run)
            DRY_RUN=true
            shift
            ;;
        --no-backup)
            NO_BACKUP=true
            shift
            ;;
        --yes|-y)
            SKIP_CONFIRM=true
            shift
            ;;
        *)
            echo "Unknown option: $1"
            echo "Usage: $0 [--dry-run] [--no-backup] [--yes]"
            exit 1
            ;;
    esac
done

# Colors for output
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

log_info() {
    echo -e "${BLUE}[INFO]${NC} $1"
}

log_success() {
    echo -e "${GREEN}[SUCCESS]${NC} $1"
}

log_warning() {
    echo -e "${YELLOW}[WARNING]${NC} $1"
}

log_error() {
    echo -e "${RED}[ERROR]${NC} $1"
}

# Step 1: Verify connectivity
log_info "Verifying connectivity to production and dev databases..."

if ! ssh -q "$PROD_HOST" "docker exec $PROD_CONTAINER psql -U $PROD_USER -d $PROD_DB -c 'SELECT 1' > /dev/null 2>&1"; then
    log_error "Cannot connect to production database on $PROD_HOST"
    exit 1
fi

if ! ssh -q "$DEV_HOST" "docker exec $DEV_CONTAINER psql -U $DEV_USER -d $DEV_DB -c 'SELECT 1' > /dev/null 2>&1"; then
    log_error "Cannot connect to dev database on $DEV_HOST"
    exit 1
fi

log_success "Database connectivity verified"

# Step 2: Get database statistics
log_info "Fetching database statistics..."

PROD_SIZE=$(ssh "$PROD_HOST" "docker exec $PROD_CONTAINER psql -U $PROD_USER -d $PROD_DB -t -c \"SELECT pg_size_pretty(pg_database_size('$PROD_DB'));\"" | xargs)
PROD_TABLES=$(ssh "$PROD_HOST" "docker exec $PROD_CONTAINER psql -U $PROD_USER -d $PROD_DB -t -c \"SELECT count(*) FROM information_schema.tables WHERE table_schema = 'public';\"" | xargs)

DEV_SIZE=$(ssh "$DEV_HOST" "docker exec $DEV_CONTAINER psql -U $DEV_USER -d $DEV_DB -t -c \"SELECT pg_size_pretty(pg_database_size('$DEV_DB'));\"" | xargs)
DEV_TABLES=$(ssh "$DEV_HOST" "docker exec $DEV_CONTAINER psql -U $DEV_USER -d $DEV_DB -t -c \"SELECT count(*) FROM information_schema.tables WHERE table_schema = 'public';\"" | xargs)

echo ""
log_info "Production Database (${PROD_HOST}):"
echo "  Database: $PROD_DB"
echo "  Size: $PROD_SIZE"
echo "  Tables: $PROD_TABLES"
echo ""
log_info "Dev Database (${DEV_HOST}):"
echo "  Database: $DEV_DB"
echo "  Size: $DEV_SIZE"
echo "  Tables: $DEV_TABLES"
echo ""

if [ "$DRY_RUN" = true ]; then
    log_warning "DRY RUN MODE - No changes will be made"
    echo ""
    log_info "Would perform the following steps:"
    echo "  1. Create pg_dump of production database ($PROD_SIZE)"
    if [ "$NO_BACKUP" = false ]; then
        echo "  2. Backup current dev database to $BACKUP_DIR"
    fi
    echo "  3. Drop and recreate dev database schema"
    echo "  4. Restore production dump to dev database"
    echo "  5. Verify restored database"
    echo "  6. Clean up temporary files"
    exit 0
fi
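# Optional safeguard (not part of the original flow, enabled here as a sketch):
# remove the temporary dump on any exit, including failures, so aborted runs do
# not leave large dump files in /tmp. Step 8 still removes it explicitly on success.
trap 'rm -f "$DUMP_FILE"' EXIT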
# Confirmation prompt
if [ "$SKIP_CONFIRM" = false ]; then
    log_warning "This will REPLACE the dev database with production data!"
    read -p "Continue? (yes/no): " -r
    echo
    if [[ ! $REPLY =~ ^[Yy][Ee][Ss]$ ]]; then
        log_info "Aborted by user"
        exit 0
    fi
else
    log_warning "Skipping confirmation (--yes flag provided)"
fi

# Step 3: Create backup directory
mkdir -p "$BACKUP_DIR"

# Step 4: Backup current dev database (optional)
if [ "$NO_BACKUP" = false ]; then
    log_info "Creating backup of current dev database..."
    BACKUP_FILE="$BACKUP_DIR/paperdynasty_dev_${TIMESTAMP}.sql"

    ssh "$DEV_HOST" "docker exec $DEV_CONTAINER pg_dump -U $DEV_USER -d $DEV_DB --clean --if-exists" > "$BACKUP_FILE"

    # The redirection creates the file even on failure, so check that it is non-empty
    if [ -s "$BACKUP_FILE" ]; then
        BACKUP_SIZE=$(du -h "$BACKUP_FILE" | cut -f1)
        log_success "Dev database backed up to $BACKUP_FILE ($BACKUP_SIZE)"
    else
        log_error "Failed to create backup"
        exit 1
    fi
fi

# Step 5: Create production database dump
log_info "Creating production database dump..."

ssh "$PROD_HOST" "docker exec $PROD_CONTAINER pg_dump -U $PROD_USER -d $PROD_DB --clean --if-exists" > "$DUMP_FILE"

if [ ! -s "$DUMP_FILE" ]; then
    log_error "Failed to create production dump"
    exit 1
fi

DUMP_SIZE=$(du -h "$DUMP_FILE" | cut -f1)
log_success "Production dump created: $DUMP_FILE ($DUMP_SIZE)"

# Step 6: Restore to dev database
log_info "Restoring production dump to dev database..."

# Restore the dump
if ssh "$DEV_HOST" "docker exec -i $DEV_CONTAINER psql -U $DEV_USER -d $DEV_DB" < "$DUMP_FILE"; then
    log_success "Database restored successfully"
else
    log_error "Failed to restore database"
    if [ "$NO_BACKUP" = false ]; then
        log_warning "Backup is available at: $BACKUP_FILE"
    fi
    exit 1
fi

# Step 7: Verify restoration
log_info "Verifying restored database..."

NEW_SIZE=$(ssh "$DEV_HOST" "docker exec $DEV_CONTAINER psql -U $DEV_USER -d $DEV_DB -t -c \"SELECT pg_size_pretty(pg_database_size('$DEV_DB'));\"" | xargs)
NEW_TABLES=$(ssh "$DEV_HOST" "docker exec $DEV_CONTAINER psql -U $DEV_USER -d $DEV_DB -t -c \"SELECT count(*) FROM information_schema.tables WHERE table_schema = 'public';\"" | xargs)

echo ""
log_info "Dev Database After Restore:"
echo "  Database: $DEV_DB"
echo "  Size: $NEW_SIZE (was $DEV_SIZE)"
echo "  Tables: $NEW_TABLES (was $DEV_TABLES)"
echo ""

# Step 8: Clean up temporary dump file
log_info "Cleaning up temporary files..."
rm -f "$DUMP_FILE"
log_success "Temporary dump file removed"

# Step 9: Summary
echo ""
log_success "Database sync complete!"
echo ""
log_info "Summary:"
echo "  Production ($PROD_HOST): $PROD_SIZE, $PROD_TABLES tables"
echo "  Dev ($DEV_HOST): $NEW_SIZE, $NEW_TABLES tables"
if [ "$NO_BACKUP" = false ]; then
    echo "  Backup: $BACKUP_FILE"
fi
echo ""
log_info "Dev database is now synchronized with production"
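
# Rollback sketch (assumes a backup was taken in this run, i.e. --no-backup was
# not used): the dev backup created in step 4 can be restored the same way the
# production dump was applied above:
#   ssh "$DEV_HOST" "docker exec -i $DEV_CONTAINER psql -U $DEV_USER -d $DEV_DB" < "$BACKUP_FILE"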