mirror of
https://github.com/uprightbass360/AzerothCore-RealmMaster.git
synced 2026-01-13 00:58:34 +00:00
import enhancements and npc spawn sketches
This commit is contained in:
@@ -452,10 +452,10 @@ else
|
||||
log "No conflicts detected"
|
||||
fi
|
||||
|
||||
# Calculate ID offsets
|
||||
# Calculate ID offsets with proper spacing
|
||||
ACCOUNT_OFFSET=$CURRENT_MAX_ACCOUNT_ID
|
||||
CHAR_OFFSET=$CURRENT_MAX_CHAR_GUID
|
||||
ITEM_OFFSET=$CURRENT_MAX_ITEM_GUID
|
||||
ITEM_OFFSET=$((CURRENT_MAX_ITEM_GUID + 10000))
|
||||
|
||||
info ""
|
||||
info "ID remapping offsets:"
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
# azerothcore-rm backup scheduler configuration.
set -e

# Base directory for all backup tiers; override via the BACKUP_DIR_BASE
# environment variable (the plain hard-coded assignment that preceded this
# would have clobbered any override, so only the parameterized form is kept).
BACKUP_DIR_BASE="${BACKUP_DIR_BASE:-/backups}"
HOURLY_DIR="$BACKUP_DIR_BASE/hourly"
DAILY_DIR="$BACKUP_DIR_BASE/daily"
# How many hourly backups to keep; override via BACKUP_RETENTION_HOURS.
RETENTION_HOURS=${BACKUP_RETENTION_HOURS:-6}
|
||||
@@ -14,16 +14,56 @@ mkdir -p "$HOURLY_DIR" "$DAILY_DIR"
|
||||
|
||||
# Print a timestamped log line to stdout.
log() {
  printf '[%s] %s\n' "$(date '+%F %T')" "$*"
}
|
||||
|
||||
# Return 0 if the named MySQL database exists and is usable, 1 otherwise.
# Backticks are stripped from the name so the quoted USE statement stays sane.
db_exists() {
  local candidate="${1:-}"
  [ -z "$candidate" ] && return 1
  local cleaned="${candidate//\`/}"
  mysql -h"${MYSQL_HOST}" -P"${MYSQL_PORT}" -u"${MYSQL_USER}" -p"${MYSQL_PASSWORD}" \
    -e "USE \`${cleaned}\`;" >/dev/null 2>&1 || return 1
  return 0
}
|
||||
|
||||
# Build database list from env (include optional acore_playerbots if present)
|
||||
# Emit (one per line) the databases to back up: the three core databases,
# plus acore_playerbots when present, plus any BACKUP_EXTRA_DATABASES
# (comma- or space-separated) that actually exist. Duplicates are dropped.
# Informational log lines go to stderr so stdout stays a clean list.
database_list() {
  local dbs=("${DB_AUTH_NAME}" "${DB_WORLD_NAME}" "${DB_CHARACTERS_NAME}")
  declare -A seen=()
  local base
  for base in "${dbs[@]}"; do
    [ -n "$base" ] && seen["$base"]=1
  done

  # Optional playerbots database: include only when it exists and is not
  # already one of the configured core names. (The older direct `mysql -e
  # "USE acore_playerbots"` probe was superseded by db_exists.)
  if db_exists "acore_playerbots" && [ -z "${seen[acore_playerbots]:-}" ]; then
    dbs+=("acore_playerbots")
    seen["acore_playerbots"]=1
    log "Detected optional database: acore_playerbots (will be backed up)" >&2
  fi

  if [ -n "${BACKUP_EXTRA_DATABASES:-}" ]; then
    local normalized="${BACKUP_EXTRA_DATABASES//,/ }"
    local extra
    for extra in $normalized; do
      [ -z "$extra" ] && continue
      # ${seen[$extra]:-} guards against unbound-key errors on old bash / set -u.
      if [ -n "${seen[$extra]:-}" ]; then
        continue
      fi
      if db_exists "$extra"; then
        dbs+=("$extra")
        seen["$extra"]=1
        log "Configured extra database '${extra}' added to backup rotation" >&2
      else
        log "⚠️ Configured extra database '${extra}' not found (skipping)" >&2
      fi
    done
  fi

  printf '%s\n' "${dbs[@]}"
}
|
||||
|
||||
# Debug/inspection hook: when BACKUP_SCHEDULER_LIST_ONLY=1, print the
# resolved database list and exit without taking any backups.
if [ "${BACKUP_SCHEDULER_LIST_ONLY:-0}" = "1" ]; then
  mapfile -t _dbs < <(database_list)
  printf '%s\n' "${_dbs[@]}"
  exit 0
fi
|
||||
|
||||
run_backup() {
|
||||
local tier_dir="$1" # hourly or daily dir
|
||||
local tier_type="$2" # "hourly" or "daily"
|
||||
|
||||
@@ -305,12 +305,17 @@ echo "✅ Fresh databases created - proceeding with schema import"
|
||||
|
||||
echo "📝 Creating dbimport configuration..."
mkdir -p /azerothcore/env/dist/etc
TEMP_DIR="/azerothcore/env/dist/temp"
mkdir -p "$TEMP_DIR"
# Prefer the mysql binary on PATH; fall back to the conventional location.
MYSQL_EXECUTABLE="$(command -v mysql || echo '/usr/bin/mysql')"
# Connection strings are "host;port;user;password;database".
# Updates.EnableDatabases = 7 is the login|world|characters bitmask.
# NOTE(review): these pair MYSQL_USER with MYSQL_ROOT_PASSWORD — confirm that
# combination is intended rather than MYSQL_PASSWORD.
cat > /azerothcore/env/dist/etc/dbimport.conf <<EOF
LoginDatabaseInfo = "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_AUTH_NAME}"
WorldDatabaseInfo = "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_WORLD_NAME}"
CharacterDatabaseInfo = "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_CHARACTERS_NAME}"
Updates.EnableDatabases = 7
Updates.AutoSetup = 1
TempDir = "${TEMP_DIR}"
MySQLExecutable = "${MYSQL_EXECUTABLE}"
EOF

echo "🚀 Running database import..."
|
||||
|
||||
257
scripts/bash/fix-item-import.sh
Executable file
257
scripts/bash/fix-item-import.sh
Executable file
@@ -0,0 +1,257 @@
|
||||
#!/bin/bash
# Fix item import for backup-merged characters: re-imports item_instance and
# character_inventory rows from a backup dump for characters that were merged
# in without their items.
set -euo pipefail

# Run from the directory containing this script so relative paths resolve.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"
|
||||
|
||||
# ANSI color palette for console output.
COLOR_RED='\033[0;31m'
COLOR_GREEN='\033[0;32m'
COLOR_YELLOW='\033[1;33m'
COLOR_BLUE='\033[0;34m'
COLOR_CYAN='\033[0;36m'
COLOR_RESET='\033[0m'

# Colored log helpers. Status/progress goes to stdout; warnings and errors
# go to stderr so they survive stdout capture/redirection (previously they
# were written to stdout and polluted captured output).
log(){ printf '%b\n' "${COLOR_GREEN}$*${COLOR_RESET}"; }
info(){ printf '%b\n' "${COLOR_CYAN}$*${COLOR_RESET}"; }
warn(){ printf '%b\n' "${COLOR_YELLOW}$*${COLOR_RESET}" >&2; }
err(){ printf '%b\n' "${COLOR_RED}$*${COLOR_RESET}" >&2; }
# Print an error and abort the script with status 1.
fatal(){ err "$*"; exit 1; }
|
||||
|
||||
# Connection/location settings. All can now be overridden from the
# environment; the defaults match the original hard-coded values.
# SECURITY NOTE: a real password is embedded here as the default — prefer
# exporting MYSQL_PW instead of committing credentials.
MYSQL_PW="${MYSQL_PW:-azerothcore123}"
BACKUP_DIR="${BACKUP_DIR:-/nfs/containers/ac-backup}"
AUTH_DB="${AUTH_DB:-acore_auth}"
CHARACTERS_DB="${CHARACTERS_DB:-acore_characters}"
|
||||
|
||||
# Verify parameters: the backup location must exist before doing anything.
[[ -d "$BACKUP_DIR" ]] || fatal "Backup directory not found: $BACKUP_DIR"

# Setup temp directory; removed automatically on any exit path.
# NOTE: a later 'trap ... EXIT' in this script replaces this handler and
# keeps removing $TEMP_DIR itself.
TEMP_DIR="$(mktemp -d)"
trap 'rm -rf "$TEMP_DIR"' EXIT
|
||||
|
||||
# Pipe SQL from stdin into the given database inside the ac-mysql container.
# NOTE: stderr is discarded — this hides the mysql password warning, but also
# any real errors; callers must check results themselves.
mysql_exec(){
  local target_db="$1"
  docker exec -i ac-mysql mysql -uroot -p"$MYSQL_PW" "$target_db" 2>/dev/null
}
|
||||
|
||||
# Run a single SQL statement against a database and print the raw
# tab-separated result rows (-N: no column headers, -B: batch mode).
mysql_query(){
  local target_db="$1"
  local sql="$2"
  docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -N -B "$target_db" -e "$sql" 2>/dev/null
}
|
||||
|
||||
log "═══════════════════════════════════════════════════════════"
log " FIXING ITEM IMPORT FOR BACKUP-MERGED CHARACTERS"
log "═══════════════════════════════════════════════════════════"

# Find characters that were imported from the backup.
# NOTE(review): account IDs 451/452 are hard-coded for this one merge —
# confirm they match the accounts created by the import before reusing.
log "Finding characters that need item restoration..."
IMPORTED_CHARS=$(mysql_query "$CHARACTERS_DB" "SELECT name, guid FROM characters WHERE account IN (451, 452);")

if [[ -z "$IMPORTED_CHARS" ]]; then
  fatal "No imported characters found (accounts 451, 452)"
fi

info "Found imported characters:"
# The pipeline runs the loop in a subshell; fine here since it only prints.
echo "$IMPORTED_CHARS" | while read -r char_name char_guid; do
  info " $char_name (guid: $char_guid)"
done

# Idempotency guard: if the remapped owners (hard-coded GUIDs 4501-4503,
# matching the character_guid_map built below) already own items, do nothing.
CURRENT_ITEM_COUNT=$(mysql_query "$CHARACTERS_DB" "SELECT COUNT(*) FROM item_instance WHERE owner_guid IN (4501, 4502, 4503);")
info "Current items for imported characters: $CURRENT_ITEM_COUNT"

if [[ "$CURRENT_ITEM_COUNT" != "0" ]]; then
  warn "Characters already have items. Exiting."
  exit 0
fi
|
||||
|
||||
# Extract backup files: look for the characters dump under any known name,
# preferring the compressed acore_* variants.
log "Extracting backup files..."
CHARACTERS_DUMP=""
for pattern in "acore_characters.sql.gz" "characters.sql.gz" "acore_characters.sql" "characters.sql"; do
  if [[ -f "$BACKUP_DIR/$pattern" ]]; then
    CHARACTERS_DUMP="$BACKUP_DIR/$pattern"
    break
  fi
done

[[ -n "$CHARACTERS_DUMP" ]] || fatal "Characters database dump not found in $BACKUP_DIR"

info "Found characters dump: ${CHARACTERS_DUMP##*/}"

# Decompress (or copy) the dump into the scratch directory.
if [[ "$CHARACTERS_DUMP" == *.gz ]]; then
  zcat "$CHARACTERS_DUMP" > "$TEMP_DIR/characters.sql"
else
  cp "$CHARACTERS_DUMP" "$TEMP_DIR/characters.sql"
fi
|
||||
|
||||
# Create a throwaway staging database, unique per process ($$ = this PID).
log "Creating staging database..."
STAGE_CHARS_DB="fix_stage_chars_$$"

# Drop any leftover staging database of the same name.
docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -e "DROP DATABASE IF EXISTS $STAGE_CHARS_DB;" 2>/dev/null || true

# Create staging database
docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -e "CREATE DATABASE $STAGE_CHARS_DB;" 2>/dev/null

# Drop the staging database on exit.
cleanup_staging(){
  if [[ -n "${STAGE_CHARS_DB:-}" ]]; then
    docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -e "DROP DATABASE IF EXISTS $STAGE_CHARS_DB;" 2>/dev/null || true
  fi
}
# This REPLACES the earlier EXIT trap, so it must also remove $TEMP_DIR.
trap 'cleanup_staging; rm -rf "$TEMP_DIR"' EXIT

# Load the backup into the staging database, rewriting references to the
# live database name so the dump cannot touch acore_characters directly.
info "Loading backup into staging database..."
sed "s/\`acore_characters\`/\`$STAGE_CHARS_DB\`/g; s/USE \`acore_characters\`;/USE \`$STAGE_CHARS_DB\`;/g" "$TEMP_DIR/characters.sql" | \
docker exec -i ac-mysql mysql -uroot -p"$MYSQL_PW" 2>/dev/null

# Compute an item GUID offset above the live table's max, with headroom.
CURRENT_MAX_ITEM_GUID=$(mysql_query "$CHARACTERS_DB" "SELECT COALESCE(MAX(guid), 0) FROM item_instance;")
ITEM_OFFSET=$((CURRENT_MAX_ITEM_GUID + 10000))

info "Current max item GUID: $CURRENT_MAX_ITEM_GUID"
info "Item GUID offset: +$ITEM_OFFSET"
|
||||
|
||||
# Map old (backup) character GUIDs to the GUIDs assigned during the merge.
# NOTE(review): this mapping is hard-coded for one specific import — verify
# the old_guid/new_guid/name triples against the live characters table.
log "Creating character mapping..."
mysql_exec "$STAGE_CHARS_DB" <<EOF
CREATE TABLE character_guid_map (
old_guid INT UNSIGNED PRIMARY KEY,
new_guid INT UNSIGNED,
name VARCHAR(12)
);

INSERT INTO character_guid_map (old_guid, new_guid, name)
VALUES
(1, 4501, 'Artimage'),
(2, 4502, 'Flombey'),
(3, 4503, 'Hammertime');
EOF

# Map each of those characters' item GUIDs to old_guid + $ITEM_OFFSET so the
# inserted rows cannot collide with live item_instance GUIDs.
mysql_exec "$STAGE_CHARS_DB" <<EOF
CREATE TABLE item_guid_map (
old_guid INT UNSIGNED PRIMARY KEY,
new_guid INT UNSIGNED,
owner_guid INT UNSIGNED
);

INSERT INTO item_guid_map (old_guid, new_guid, owner_guid)
SELECT
i.guid,
i.guid + $ITEM_OFFSET,
i.owner_guid
FROM item_instance i
INNER JOIN character_guid_map cm ON i.owner_guid = cm.old_guid;
EOF

# Abort early if the backup holds nothing for these characters.
ITEMS_TO_IMPORT=$(mysql_query "$STAGE_CHARS_DB" "SELECT COUNT(*) FROM item_guid_map;")
info "Items to import: $ITEMS_TO_IMPORT"

if [[ "$ITEMS_TO_IMPORT" == "0" ]]; then
  warn "No items found for the imported characters in backup"
  exit 0
fi

# Stop world/auth so nothing writes to the character tables mid-import.
log "Stopping world/auth services..."
docker stop ac-worldserver ac-authserver >/dev/null 2>&1 || warn "Services already stopped"

# Import items
log "Importing character items..."
|
||||
|
||||
# Import item_instance: insert the staged rows with remapped item GUIDs
# (im.new_guid) and remapped owners (cm.new_guid).
# The here-doc delimiter is unquoted, so $STAGE_CHARS_DB expands right here;
# the former 's/STAGE_CHARS_DB/…/' sed pass matched nothing after expansion
# (a no-op) and has been removed.
ITEM_SQL=$(cat <<EOSQL
INSERT INTO item_instance (guid, itemEntry, owner_guid, creatorGuid, giftCreatorGuid, count,
duration, charges, flags, enchantments, randomPropertyId, durability,
playedTime, text)
SELECT
im.new_guid,
ii.itemEntry,
cm.new_guid,
ii.creatorGuid,
ii.giftCreatorGuid,
ii.count,
ii.duration,
ii.charges,
ii.flags,
ii.enchantments,
ii.randomPropertyId,
ii.durability,
ii.playedTime,
ii.text
FROM $STAGE_CHARS_DB.item_instance ii
INNER JOIN $STAGE_CHARS_DB.item_guid_map im ON ii.guid = im.old_guid
INNER JOIN $STAGE_CHARS_DB.character_guid_map cm ON ii.owner_guid = cm.old_guid;
EOSQL
)

# Capture combined output and fail loudly on any mysql ERROR line.
ITEM_RESULT=$(printf '%s\n' "$ITEM_SQL" | docker exec -i ac-mysql mysql -uroot -p"$MYSQL_PW" "$CHARACTERS_DB" 2>&1)
if echo "$ITEM_RESULT" | grep -q "ERROR"; then
  err "Item import failed:"
  echo "$ITEM_RESULT" | grep "ERROR" >&2
  fatal "Item import failed"
fi
|
||||
|
||||
# Import character_inventory: rewrite the character guid and the item guid
# through the two staging mapping tables. As with the item import above,
# the redundant sed expansion pass was removed — the unquoted here-doc
# already expands $STAGE_CHARS_DB.
INV_SQL=$(cat <<EOSQL
INSERT INTO character_inventory (guid, bag, slot, item)
SELECT
cm.new_guid,
ci.bag,
ci.slot,
im.new_guid
FROM $STAGE_CHARS_DB.character_inventory ci
INNER JOIN $STAGE_CHARS_DB.character_guid_map cm ON ci.guid = cm.old_guid
INNER JOIN $STAGE_CHARS_DB.item_guid_map im ON ci.item = im.old_guid;
EOSQL
)

# Capture combined output and fail loudly on any mysql ERROR line.
INV_RESULT=$(printf '%s\n' "$INV_SQL" | docker exec -i ac-mysql mysql -uroot -p"$MYSQL_PW" "$CHARACTERS_DB" 2>&1)
if echo "$INV_RESULT" | grep -q "ERROR"; then
  err "Inventory import failed:"
  echo "$INV_RESULT" | grep "ERROR" >&2
  fatal "Inventory import failed"
fi
|
||||
|
||||
# Report how many rows landed for the remapped character GUIDs (4501-4503).
ITEMS_IMPORTED=$(mysql_query "$CHARACTERS_DB" "SELECT COUNT(*) FROM item_instance WHERE owner_guid IN (4501, 4502, 4503);")
INV_IMPORTED=$(mysql_query "$CHARACTERS_DB" "SELECT COUNT(*) FROM character_inventory WHERE guid IN (4501, 4502, 4503);")

info "Items imported: $ITEMS_IMPORTED"
info "Inventory slots imported: $INV_IMPORTED"

# Restart services
log "Restarting services..."
docker restart ac-authserver ac-worldserver >/dev/null 2>&1

log "Waiting for services to initialize..."
sleep 5

# Poll up to ~60s (30 tries x 2s) for both daemons to come up.
for i in {1..30}; do
  if docker exec ac-worldserver pgrep worldserver >/dev/null 2>&1 && docker exec ac-authserver pgrep authserver >/dev/null 2>&1; then
    log "✓ Services running"
    break
  fi
  # On the last attempt only warn — items are already imported; do not fail.
  if [ $i -eq 30 ]; then
    warn "Services took longer than expected to start"
  fi
  sleep 2
done

log ""
log "═══════════════════════════════════════════════════════════"
log " ITEM IMPORT FIX COMPLETE"
log "═══════════════════════════════════════════════════════════"
log "Items successfully restored for imported characters!"
log "Players can now log in with their complete characters and items."
|
||||
@@ -1,6 +1,6 @@
|
||||
#!/bin/bash
# Copy user database files or full backup archives from database-import/ to
# the backup system so they are restored on the next deployment.
# (The superseded pre-diff header with plain `set -e` was dropped in favor of
# the stricter post-diff strict mode below.)
set -euo pipefail
|
||||
|
||||
# Source environment variables
|
||||
if [ -f ".env" ]; then
|
||||
@@ -13,14 +13,30 @@ IMPORT_DIR="./database-import"
|
||||
# Staging locations inside the shared storage tree; both roots are
# overridable from the environment.
STORAGE_PATH="${STORAGE_PATH:-./storage}"
STORAGE_PATH_LOCAL="${STORAGE_PATH_LOCAL:-./local-storage}"
# Loose .sql/.sql.gz dumps go into the daily backup tier...
BACKUP_DIR="${STORAGE_PATH}/backups/daily"
# ...while full backup directories/archives are staged separately.
FULL_BACKUP_DIR="${STORAGE_PATH}/backups/ImportBackup"
TIMESTAMP=$(date +%Y-%m-%d)
|
||||
|
||||
# Discover importable content in $IMPORT_DIR: loose SQL dumps, backup
# archives, and directories that contain dump files. nullglob makes
# unmatched patterns expand to nothing instead of their literal selves.
# (The superseded pre-diff guard that parsed `ls` output was dropped.)
shopt -s nullglob

sql_files=("$IMPORT_DIR"/*.sql "$IMPORT_DIR"/*.sql.gz)
archive_files=("$IMPORT_DIR"/*.tar "$IMPORT_DIR"/*.tar.gz "$IMPORT_DIR"/*.tgz "$IMPORT_DIR"/*.zip)

declare -a full_backup_dirs=()
for dir in "$IMPORT_DIR"/*/; do
  dir="${dir%/}"
  # Keep only directories that actually contain dump-like files.
  if compgen -G "$dir"/*.sql >/dev/null || compgen -G "$dir"/*.sql.gz >/dev/null; then
    full_backup_dirs+=("$dir")
  fi
done

if [ ! -d "$IMPORT_DIR" ] || { [ ${#sql_files[@]} -eq 0 ] && [ ${#archive_files[@]} -eq 0 ] && [ ${#full_backup_dirs[@]} -eq 0 ]; }; then
  echo "📁 No database files or full backups found in $IMPORT_DIR - skipping import"
  exit 0
fi

shopt -u nullglob
|
||||
|
||||
# Exit if backup system already has databases restored
|
||||
if [ -f "${STORAGE_PATH_LOCAL}/mysql-data/.restore-completed" ]; then
|
||||
echo "✅ Database already restored - skipping import"
|
||||
@@ -31,10 +47,25 @@ echo "📥 Found database files in $IMPORT_DIR"
|
||||
echo "📂 Copying to backup system for import..."
|
||||
|
||||
# Ensure both staging directories exist before copying anything in
# (dumps → daily tier, full backups → ImportBackup). The superseded
# single-directory mkdir from the pre-diff version was dropped.
mkdir -p "$BACKUP_DIR" "$FULL_BACKUP_DIR"
|
||||
|
||||
# Print $1 unchanged when nothing exists at that path; otherwise append
# _2, _3, ... until a free path is found, and print that.
generate_unique_path(){
  local desired="$1"
  local candidate="$desired"
  local suffix=2
  while [ -e "$candidate" ]; do
    candidate="${desired}_${suffix}"
    suffix=$((suffix + 1))
  done
  printf '%s\n' "$candidate"
}
|
||||
|
||||
# Tallies of what gets staged, reported in the summary at the end.
copied_sql=0
staged_dirs=0
staged_archives=0
|
||||
|
||||
# Copy files with smart naming
|
||||
for file in "$IMPORT_DIR"/*.sql "$IMPORT_DIR"/*.sql.gz; do
|
||||
for file in "${sql_files[@]:-}"; do
|
||||
[ -f "$file" ] || continue
|
||||
|
||||
filename=$(basename "$file")
|
||||
@@ -62,7 +93,106 @@ for file in "$IMPORT_DIR"/*.sql "$IMPORT_DIR"/*.sql.gz; do
|
||||
|
||||
echo "📋 Copying $filename → $target_name"
|
||||
cp "$file" "$target_path"
|
||||
copied_sql=$((copied_sql + 1))
|
||||
done
|
||||
|
||||
echo "✅ Database files copied to backup system"
|
||||
echo "💡 Files will be automatically imported during deployment"
|
||||
# Copy one full-backup directory into $FULL_BACKUP_DIR under a de-duplicated
# name, and bump the staged_dirs counter.
stage_backup_directory(){
  local source_dir="$1"
  local name
  name="$(basename "$source_dir")"
  local destination
  destination="$(generate_unique_path "$FULL_BACKUP_DIR/$name")"
  echo "📦 Staging full backup directory $(basename "$source_dir") → $(basename "$destination")"
  cp -a "$source_dir" "$destination"
  staged_dirs=$((staged_dirs + 1))
}
|
||||
|
||||
# Extract one backup archive (.tar/.tar.gz/.tgz/.zip) into $FULL_BACKUP_DIR.
# If the archive unpacks to a single top-level directory, that directory is
# moved in under a unique name; otherwise all entries are gathered into a new
# directory named after the archive. Unsupported formats or failed extractions
# are reported and skipped (the function returns without staging anything).
extract_archive(){
  local archive="$1"
  local base_name
  base_name="$(basename "$archive")"
  local tmp_dir
  tmp_dir="$(mktemp -d)"
  local extracted=0

  # Redefined per call; only used inside this function to discard tmp_dir.
  cleanup_tmp(){
    rm -rf "$tmp_dir"
  }

  case "$archive" in
    *.tar.gz|*.tgz)
      if tar -xzf "$archive" -C "$tmp_dir"; then
        extracted=1
      fi
      ;;
    *.tar)
      if tar -xf "$archive" -C "$tmp_dir"; then
        extracted=1
      fi
      ;;
    *.zip)
      if ! command -v unzip >/dev/null 2>&1; then
        echo "⚠️ unzip not found; cannot extract $base_name"
      elif unzip -q "$archive" -d "$tmp_dir"; then
        extracted=1
      fi
      ;;
    *)
      echo "⚠️ Unsupported archive format for $base_name"
      ;;
  esac

  if [ "$extracted" -ne 1 ]; then
    cleanup_tmp
    return
  fi

  # NUL-delimited listing of the top-level extracted entries.
  # NOTE(review): mapfile -d '' requires bash >= 4.4 — confirm target bash.
  mapfile -d '' entries < <(find "$tmp_dir" -mindepth 1 -maxdepth 1 -print0) || true
  local dest=""
  if [ ${#entries[@]} -eq 1 ] && [ -d "${entries[0]}" ]; then
    # Single wrapper directory: move it in as-is under a unique name.
    local inner_name
    inner_name="$(basename "${entries[0]}")"
    dest="$FULL_BACKUP_DIR/$inner_name"
    dest="$(generate_unique_path "$dest")"
    mv "${entries[0]}" "$dest"
  else
    # Multiple entries (or a single file): gather them into a directory
    # named after the archive with its extension(s) stripped.
    local base="${base_name%.*}"
    base="${base%.*}" # handle double extensions like .tar.gz
    dest="$(generate_unique_path "$FULL_BACKUP_DIR/$base")"
    mkdir -p "$dest"
    if [ ${#entries[@]} -gt 0 ]; then
      mv "${entries[@]}" "$dest"/
    fi
  fi
  echo "🗂️ Extracted $base_name → $(basename "$dest")"
  staged_archives=$((staged_archives + 1))
  cleanup_tmp
}
|
||||
|
||||
# Stage everything discovered earlier. With the "${arr[@]:-}" fallback an
# EMPTY array expands to a single empty word under set -u, so blank entries
# must be skipped — previously "" was passed to the helpers, whose basename/
# cp/tar calls would fail and kill the script under set -e.
for dir in "${full_backup_dirs[@]:-}"; do
  [ -n "$dir" ] || continue
  stage_backup_directory "$dir"
done

for archive in "${archive_files[@]:-}"; do
  [ -n "$archive" ] || continue
  extract_archive "$archive"
done
|
||||
|
||||
# Summarize what was staged; singular/plural labels chosen per count.
if [ "$copied_sql" -gt 0 ]; then
  echo "✅ $copied_sql database file(s) copied to $BACKUP_DIR"
fi
if [ "$staged_dirs" -gt 0 ]; then
  dir_label="directories"
  [ "$staged_dirs" -eq 1 ] && dir_label="directory"
  echo "✅ $staged_dirs full backup $dir_label staged in $FULL_BACKUP_DIR"
fi
if [ "$staged_archives" -gt 0 ]; then
  archive_label="archives"
  [ "$staged_archives" -eq 1 ] && archive_label="archive"
  echo "✅ $staged_archives backup $archive_label extracted to $FULL_BACKUP_DIR"
fi

# Nothing staged at all: warn the user about the expected layouts.
if [ "$copied_sql" -eq 0 ] && [ "$staged_dirs" -eq 0 ] && [ "$staged_archives" -eq 0 ]; then
  echo "⚠️ No valid files or backups were staged. Ensure your dumps are .sql/.sql.gz or packaged in directories/archives."
else
  echo "💡 Files will be automatically imported during deployment"
fi
|
||||
|
||||
@@ -128,6 +128,13 @@ resolve_project_image(){
|
||||
echo "${project_name}:${tag}"
|
||||
}
|
||||
|
||||
# True when the image tag belongs to this compose project, i.e. it looks like
# a locally-built "<project>:<tag>" image rather than a registry image.
is_project_local_image(){
  local candidate_image="$1"
  local project_prefix
  project_prefix="$(resolve_project_name)"
  [[ "$candidate_image" == "${project_prefix}:"* ]]
}
|
||||
|
||||
canonical_path(){
|
||||
local path="$1"
|
||||
if command -v realpath >/dev/null 2>&1; then
|
||||
@@ -300,8 +307,12 @@ TARGET_WORLDSERVER_IMAGE_MODULES="$(read_env AC_WORLDSERVER_IMAGE_MODULES "$(res
|
||||
if [ "$TARGET_PROFILE" = "modules" ]; then
|
||||
# Check if source image exists
|
||||
if ! docker image inspect "$TARGET_WORLDSERVER_IMAGE_MODULES" >/dev/null 2>&1; then
|
||||
echo "📦 Modules image $TARGET_WORLDSERVER_IMAGE_MODULES not found - rebuild needed"
|
||||
REBUILD_NEEDED=1
|
||||
if is_project_local_image "$TARGET_WORLDSERVER_IMAGE_MODULES"; then
|
||||
echo "📦 Modules image $TARGET_WORLDSERVER_IMAGE_MODULES not found - rebuild needed"
|
||||
REBUILD_NEEDED=1
|
||||
else
|
||||
echo "ℹ️ Modules image $TARGET_WORLDSERVER_IMAGE_MODULES missing locally but not tagged with the project prefix; assuming compose will pull from your registry."
|
||||
fi
|
||||
elif [ -f "$SENTINEL_FILE" ]; then
|
||||
echo "🔄 Modules changed since last build - rebuild needed"
|
||||
REBUILD_NEEDED=1
|
||||
|
||||
Reference in New Issue
Block a user