diff --git a/deploy.sh b/deploy.sh index 7ed8d88..fa5e43c 100755 --- a/deploy.sh +++ b/deploy.sh @@ -35,6 +35,7 @@ REMOTE_COPY_SOURCE=0 REMOTE_ARGS_PROVIDED=0 REMOTE_AUTO_DEPLOY=0 +REMOTE_CLEAN_RUNTIME=0 REMOTE_STORAGE_OVERRIDE="" REMOTE_CONTAINER_USER_OVERRIDE="" REMOTE_ENV_FILE="" @@ -168,6 +169,16 @@ collect_remote_details(){ esac fi + if [ "$interactive" -eq 1 ] && [ "$REMOTE_ARGS_PROVIDED" -eq 0 ]; then + local cleanup_answer + read -rp "Stop/remove remote containers & project images during migration? [y/N]: " cleanup_answer + cleanup_answer="${cleanup_answer:-n}" + case "${cleanup_answer,,}" in + y|yes) REMOTE_CLEAN_RUNTIME=1 ;; + *) REMOTE_CLEAN_RUNTIME=0 ;; + esac + fi + # Optional remote env overrides (default to current values) local storage_default container_user_default storage_default="$(read_env STORAGE_PATH "./storage")" @@ -240,6 +251,7 @@ Options: --remote-skip-storage Skip syncing the storage directory during migration --remote-copy-source Copy the local project directory to remote instead of relying on git --remote-auto-deploy Run './deploy.sh --yes --no-watch' on the remote host after migration + --remote-clean-runtime Stop/remove remote containers & project images during migration --remote-storage-path PATH Override STORAGE_PATH/STORAGE_PATH_LOCAL in the remote .env --remote-container-user USER[:GROUP] Override CONTAINER_USER in the remote .env --skip-config Skip applying server configuration preset @@ -270,6 +282,7 @@ while [[ $# -gt 0 ]]; do --remote-skip-storage) REMOTE_SKIP_STORAGE=1; REMOTE_MODE=1; REMOTE_ARGS_PROVIDED=1; shift;; --remote-copy-source) REMOTE_COPY_SOURCE=1; REMOTE_MODE=1; REMOTE_ARGS_PROVIDED=1; shift;; --remote-auto-deploy) REMOTE_AUTO_DEPLOY=1; REMOTE_MODE=1; REMOTE_ARGS_PROVIDED=1; shift;; + --remote-clean-runtime) REMOTE_CLEAN_RUNTIME=1; REMOTE_MODE=1; REMOTE_ARGS_PROVIDED=1; shift;; --remote-storage-path) REMOTE_STORAGE_OVERRIDE="$2"; REMOTE_MODE=1; REMOTE_ARGS_PROVIDED=1; shift 2;; 
--remote-container-user) REMOTE_CONTAINER_USER_OVERRIDE="$2"; REMOTE_MODE=1; REMOTE_ARGS_PROVIDED=1; shift 2;; --skip-config) SKIP_CONFIG=1; shift;; @@ -680,6 +693,10 @@ run_remote_migration(){ args+=(--copy-source) fi + if [ "$REMOTE_CLEAN_RUNTIME" -eq 1 ]; then + args+=(--cleanup-runtime) + fi + if [ "$ASSUME_YES" -eq 1 ]; then args+=(--yes) fi diff --git a/docker-compose.yml b/docker-compose.yml index 4e709d8..d9f2e22 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -557,7 +557,7 @@ services: AC_UPDATES_ENABLE_DATABASES: "7" AC_BIND_IP: "0.0.0.0" AC_DATA_DIR: "/azerothcore/data" - AC_SOAP_PORT: "7878" + AC_SOAP_PORT: "${SOAP_PORT}" AC_PROCESS_PRIORITY: "0" AC_ELUNA_ENABLED: "${AC_ELUNA_ENABLED}" AC_ELUNA_TRACE_BACK: "${AC_ELUNA_TRACE_BACK}" @@ -688,7 +688,7 @@ services: AC_UPDATES_ENABLE_DATABASES: "7" AC_BIND_IP: "0.0.0.0" AC_DATA_DIR: "/azerothcore/data" - AC_SOAP_PORT: "7878" + AC_SOAP_PORT: "${SOAP_PORT}" AC_PROCESS_PRIORITY: "0" AC_ELUNA_ENABLED: "${AC_ELUNA_ENABLED}" AC_ELUNA_TRACE_BACK: "${AC_ELUNA_TRACE_BACK}" @@ -744,7 +744,7 @@ services: AC_UPDATES_ENABLE_DATABASES: "7" AC_BIND_IP: "0.0.0.0" AC_DATA_DIR: "/azerothcore/data" - AC_SOAP_PORT: "7878" + AC_SOAP_PORT: "${SOAP_PORT}" AC_PROCESS_PRIORITY: "0" AC_ELUNA_ENABLED: "${AC_ELUNA_ENABLED}" AC_ELUNA_TRACE_BACK: "${AC_ELUNA_TRACE_BACK}" diff --git a/scripts/bash/backup-export.sh b/scripts/bash/backup-export.sh index 4af7f68..a83cf56 100755 --- a/scripts/bash/backup-export.sh +++ b/scripts/bash/backup-export.sh @@ -4,8 +4,17 @@ set -euo pipefail INVOCATION_DIR="$PWD" SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." 
&& pwd)" cd "$SCRIPT_DIR" +# Load environment defaults if present +if [ -f "$PROJECT_ROOT/.env" ]; then + set -a + # shellcheck disable=SC1091 + source "$PROJECT_ROOT/.env" + set +a +fi + SUPPORTED_DBS=(auth characters world) declare -A SUPPORTED_SET=() for db in "${SUPPORTED_DBS[@]}"; do @@ -16,10 +25,12 @@ declare -A DB_NAMES=([auth]="" [characters]="" [world]="") declare -a INCLUDE_DBS=() declare -a SKIP_DBS=() -MYSQL_PW="" +MYSQL_PW="${MYSQL_ROOT_PASSWORD:-}" DEST_PARENT="" DEST_PROVIDED=false EXPLICIT_SELECTION=false +MYSQL_CONTAINER="${CONTAINER_MYSQL:-ac-mysql}" +DEFAULT_BACKUP_DIR="${BACKUP_PATH:-${STORAGE_PATH:-./storage}/backups}" usage(){ cat <<'EOF' @@ -28,7 +39,7 @@ Usage: ./backup-export.sh [options] Creates a timestamped backup of one or more ACore databases. Options: - -o, --output DIR Destination directory (default: storage/backups) + -o, --output DIR Destination directory (default: BACKUP_PATH from .env, fallback: ./storage/backups) -p, --password PASS MySQL root password --auth-db NAME Auth database schema name --characters-db NAME Characters database schema name @@ -224,13 +235,9 @@ done if $DEST_PROVIDED; then DEST_PARENT="$(resolve_relative "$INVOCATION_DIR" "$DEST_PARENT")" else - # Use storage/backups as default to align with existing backup structure - if [ -d "$SCRIPT_DIR/storage" ]; then - DEST_PARENT="$SCRIPT_DIR/storage/backups" - mkdir -p "$DEST_PARENT" - else - DEST_PARENT="$SCRIPT_DIR" - fi + DEFAULT_BACKUP_DIR="$(resolve_relative "$PROJECT_ROOT" "$DEFAULT_BACKUP_DIR")" + DEST_PARENT="$DEFAULT_BACKUP_DIR" + mkdir -p "$DEST_PARENT" fi TIMESTAMP="$(date +%Y%m%d_%H%M%S)" @@ -241,7 +248,7 @@ generated_at="$(date --iso-8601=seconds)" dump_db(){ local schema="$1" outfile="$2" echo "Dumping ${schema} -> ${outfile}" - docker exec ac-mysql mysqldump -uroot -p"$MYSQL_PW" "$schema" | gzip > "$outfile" + docker exec "$MYSQL_CONTAINER" mysqldump -uroot -p"$MYSQL_PW" "$schema" | gzip > "$outfile" } for db in "${ACTIVE_DBS[@]}"; do diff --git 
a/scripts/bash/db-health-check.sh b/scripts/bash/db-health-check.sh index 3f983b0..abc1276 100755 --- a/scripts/bash/db-health-check.sh +++ b/scripts/bash/db-health-check.sh @@ -32,6 +32,22 @@ SHOW_PENDING=0 SHOW_MODULES=1 CONTAINER_NAME="ac-mysql" +resolve_path(){ + local base="$1" path="$2" + if command -v python3 >/dev/null 2>&1; then + python3 - "$base" "$path" <<'PY' +import os, sys +base, path = sys.argv[1:3] +if os.path.isabs(path): + print(os.path.normpath(path)) +else: + print(os.path.normpath(os.path.join(base, path))) +PY + else + (cd "$base" && realpath -m "$path") + fi +} + usage() { cat <<'EOF' Usage: ./db-health-check.sh [options] @@ -73,6 +89,10 @@ if [ -f "$PROJECT_ROOT/.env" ]; then set +a fi +BACKUP_PATH_RAW="${BACKUP_PATH:-${STORAGE_PATH:-./storage}/backups}" +BACKUP_PATH="$(resolve_path "$PROJECT_ROOT" "$BACKUP_PATH_RAW")" +CONTAINER_NAME="${CONTAINER_MYSQL:-$CONTAINER_NAME}" + MYSQL_HOST="${MYSQL_HOST:-ac-mysql}" MYSQL_PORT="${MYSQL_PORT:-3306}" MYSQL_USER="${MYSQL_USER:-root}" @@ -263,7 +283,7 @@ show_module_updates() { # Get backup information get_backup_info() { - local backup_dir="$PROJECT_ROOT/storage/backups" + local backup_dir="$BACKUP_PATH" if [ ! 
-d "$backup_dir" ]; then printf " ${ICON_INFO} No backups directory found\n" diff --git a/scripts/bash/migrate-stack.sh b/scripts/bash/migrate-stack.sh index 327be41..18f14b0 100755 --- a/scripts/bash/migrate-stack.sh +++ b/scripts/bash/migrate-stack.sh @@ -149,6 +149,7 @@ Options: --storage PATH Remote storage directory (default: /storage) --skip-storage Do not sync the storage directory --copy-source Copy the full local project directory instead of syncing via git + --cleanup-runtime Stop/remove existing ac-* containers and project images on remote --yes, -y Auto-confirm prompts (for existing deployments) --help Show this help EOF_HELP @@ -164,6 +165,7 @@ REMOTE_STORAGE="" SKIP_STORAGE=0 ASSUME_YES=0 COPY_SOURCE=0 +CLEANUP_RUNTIME=0 while [[ $# -gt 0 ]]; do case "$1" in @@ -177,6 +179,7 @@ while [[ $# -gt 0 ]]; do --storage) REMOTE_STORAGE="$2"; shift 2;; --skip-storage) SKIP_STORAGE=1; shift;; --copy-source) COPY_SOURCE=1; shift;; + --cleanup-runtime) CLEANUP_RUNTIME=1; shift;; --yes|-y) ASSUME_YES=1; shift;; --help|-h) usage; exit 0;; *) echo "Unknown option: $1" >&2; usage; exit 1;; @@ -385,6 +388,11 @@ setup_remote_repository(){ } cleanup_stale_docker_resources(){ + if [ "$CLEANUP_RUNTIME" -ne 1 ]; then + echo "⋅ Skipping remote runtime cleanup (containers and images preserved)." + return + fi + echo "⋅ Cleaning up stale Docker resources on remote..." 
# Stop and remove old containers diff --git a/scripts/bash/test-phase1-integration.sh b/scripts/bash/test-phase1-integration.sh index 5ebb027..479b71a 100755 --- a/scripts/bash/test-phase1-integration.sh +++ b/scripts/bash/test-phase1-integration.sh @@ -22,6 +22,32 @@ ICON_ERROR="❌" ICON_INFO="ℹ️" ICON_TEST="🧪" +resolve_path(){ + local base="$1" path="$2" + if command -v python3 >/dev/null 2>&1; then + python3 - "$base" "$path" <<'PY' +import os, sys +base, path = sys.argv[1:3] +if os.path.isabs(path): + print(os.path.normpath(path)) +else: + print(os.path.normpath(os.path.join(base, path))) +PY + else + (cd "$base" && realpath -m "$path") + fi +} + +if [ -f "$PROJECT_ROOT/.env" ]; then + set -a + # shellcheck disable=SC1091 + source "$PROJECT_ROOT/.env" + set +a +fi + +LOCAL_MODULES_DIR_RAW="${STORAGE_PATH_LOCAL:-./local-storage}/modules" +LOCAL_MODULES_DIR="$(resolve_path "$PROJECT_ROOT" "$LOCAL_MODULES_DIR_RAW")" + # Counters TESTS_TOTAL=0 TESTS_PASSED=0 @@ -117,7 +143,7 @@ info "Running: python3 scripts/python/modules.py generate" if python3 scripts/python/modules.py \ --env-path .env \ --manifest config/module-manifest.json \ - generate --output-dir local-storage/modules > /tmp/phase1-modules-generate.log 2>&1; then + generate --output-dir "$LOCAL_MODULES_DIR" > /tmp/phase1-modules-generate.log 2>&1; then ok "Module state generation successful" else # Check if it's just warnings @@ -130,11 +156,11 @@ fi # Test 4: Verify SQL manifest created test_header "SQL Manifest Verification" -if [ -f local-storage/modules/.sql-manifest.json ]; then - ok "SQL manifest created: local-storage/modules/.sql-manifest.json" +if [ -f "$LOCAL_MODULES_DIR/.sql-manifest.json" ]; then + ok "SQL manifest created: $LOCAL_MODULES_DIR/.sql-manifest.json" # Check manifest structure - module_count=$(python3 -c "import json; data=json.load(open('local-storage/modules/.sql-manifest.json')); print(len(data.get('modules', [])))" 2>/dev/null || echo "0") + module_count=$(python3 -c "import 
json; data=json.load(open('$LOCAL_MODULES_DIR/.sql-manifest.json')); print(len(data.get('modules', [])))" 2>/dev/null || echo "0") info "Modules with SQL: $module_count" if [ "$module_count" -gt 0 ]; then @@ -142,7 +168,7 @@ if [ -f local-storage/modules/.sql-manifest.json ]; then # Show first module info "Sample module SQL info:" - python3 -c "import json; data=json.load(open('local-storage/modules/.sql-manifest.json')); m=data['modules'][0] if data['modules'] else {}; print(f\" Name: {m.get('name', 'N/A')}\n SQL files: {len(m.get('sql_files', {}))}\") " 2>/dev/null || true + python3 -c "import json; data=json.load(open('$LOCAL_MODULES_DIR/.sql-manifest.json')); m=data['modules'][0] if data['modules'] else {}; print(f\" Name: {m.get('name', 'N/A')}\n SQL files: {len(m.get('sql_files', {}))}\") " 2>/dev/null || true else warn "No modules with SQL files (expected if modules not yet staged)" fi @@ -152,19 +178,19 @@ fi # Test 5: Verify modules.env created test_header "Module Environment File Check" -if [ -f local-storage/modules/modules.env ]; then +if [ -f "$LOCAL_MODULES_DIR/modules.env" ]; then ok "modules.env created" # Check for key exports - if grep -q "MODULES_ENABLED=" local-storage/modules/modules.env; then + if grep -q "MODULES_ENABLED=" "$LOCAL_MODULES_DIR/modules.env"; then ok "MODULES_ENABLED variable present" fi - if grep -q "MODULES_REQUIRES_CUSTOM_BUILD=" local-storage/modules/modules.env; then + if grep -q "MODULES_REQUIRES_CUSTOM_BUILD=" "$LOCAL_MODULES_DIR/modules.env"; then ok "Build requirement flags present" # Check if build required - source local-storage/modules/modules.env + source "$LOCAL_MODULES_DIR/modules.env" if [ "${MODULES_REQUIRES_CUSTOM_BUILD:-0}" = "1" ]; then info "Custom build required (C++ modules enabled)" else @@ -177,8 +203,8 @@ fi # Test 6: Check build requirement test_header "Build Requirement Check" -if [ -f local-storage/modules/modules.env ]; then - source local-storage/modules/modules.env +if [ -f 
"$LOCAL_MODULES_DIR/modules.env" ]; then + source "$LOCAL_MODULES_DIR/modules.env" info "MODULES_REQUIRES_CUSTOM_BUILD=${MODULES_REQUIRES_CUSTOM_BUILD:-0}" info "MODULES_REQUIRES_PLAYERBOT_SOURCE=${MODULES_REQUIRES_PLAYERBOT_SOURCE:-0}" diff --git a/scripts/bash/verify-deployment.sh b/scripts/bash/verify-deployment.sh index b84f825..05dca7d 100755 --- a/scripts/bash/verify-deployment.sh +++ b/scripts/bash/verify-deployment.sh @@ -98,12 +98,23 @@ read_env_value(){ if [ -f "$env_path" ]; then value="$(grep -E "^${key}=" "$env_path" | tail -n1 | cut -d'=' -f2- | tr -d '\r')" fi + # Fallback to template defaults if not set in the chosen env file + if [ -z "$value" ] && [ -f "$TEMPLATE_FILE" ]; then + value="$(grep -E "^${key}=" "$TEMPLATE_FILE" | tail -n1 | cut -d'=' -f2- | tr -d '\r')" + fi if [ -z "$value" ]; then value="$default" fi echo "$value" } +MYSQL_EXTERNAL_PORT="$(read_env_value MYSQL_EXTERNAL_PORT 64306)" +AUTH_EXTERNAL_PORT="$(read_env_value AUTH_EXTERNAL_PORT 3784)" +WORLD_EXTERNAL_PORT="$(read_env_value WORLD_EXTERNAL_PORT 8215)" +SOAP_EXTERNAL_PORT="$(read_env_value SOAP_EXTERNAL_PORT 7778)" +PMA_EXTERNAL_PORT="$(read_env_value PMA_EXTERNAL_PORT 8081)" +KEIRA3_EXTERNAL_PORT="$(read_env_value KEIRA3_EXTERNAL_PORT 4201)" + handle_auto_rebuild(){ local storage_path storage_path="$(read_env_value STORAGE_PATH_LOCAL "./local-storage")" @@ -171,7 +182,7 @@ health_checks(){ check_health ac-worldserver || ((failures++)) if [ "$QUICK" = false ]; then info "Port checks" - for port in 64306 3784 8215 7778 8081 4201; do + for port in "$MYSQL_EXTERNAL_PORT" "$AUTH_EXTERNAL_PORT" "$WORLD_EXTERNAL_PORT" "$SOAP_EXTERNAL_PORT" "$PMA_EXTERNAL_PORT" "$KEIRA3_EXTERNAL_PORT"; do if timeout 3 bash -c "</dev/tcp/localhost/$port" 2>/dev/null; then ok "port $port: open"; else warn "port $port: closed"; fi done fi @@ -190,7 +201,7 @@ main(){ fi health_checks handle_auto_rebuild - info "Endpoints: MySQL:64306, Auth:3784, World:8215, SOAP:7778, phpMyAdmin:8081, Keira3:4201" + info "Endpoints: 
MySQL:${MYSQL_EXTERNAL_PORT}, Auth:${AUTH_EXTERNAL_PORT}, World:${WORLD_EXTERNAL_PORT}, SOAP:${SOAP_EXTERNAL_PORT}, phpMyAdmin:${PMA_EXTERNAL_PORT}, Keira3:${KEIRA3_EXTERNAL_PORT}" } main "$@"