backup imports
@@ -34,7 +34,7 @@ Place your database backup files here for automatic import during deployment.
 
 ## What Happens
 
 - Individual `.sql`/`.sql.gz` files are copied to `storage/backups/daily/` with a timestamped name
-- Full backup directories or archives are staged in `storage/backups/ImportBackup/`
+- Full backup directories or archives are staged directly under `storage/backups/` (e.g., `storage/backups/ExportBackup_20241029_120000/`)
 - Database import system automatically restores the most recent matching backup
 - Original files remain here for reference (archives are left untouched)
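
A minimal sketch of the drop-in flow described above, reusing the `storage/backups` root and the `TIMESTAMP` format this commit introduces in the staging script; the source path and exact renaming pattern are illustrative, not the deployment script itself:

```bash
#!/usr/bin/env bash
# Illustrative drop-in staging only (names are assumptions).
BACKUP_ROOT="storage/backups"
TIMESTAMP=$(date +%Y-%m-%d)                      # same format the import script uses

src="database-import/acore_auth.sql.gz"          # hypothetical dump you dropped in
base="$(basename "$src" .sql.gz)"
mkdir -p "$BACKUP_ROOT/daily"
# Individual dumps are copied to daily/ under a timestamped name:
cp "$src" "$BACKUP_ROOT/daily/${base}_${TIMESTAMP}.sql.gz"
```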
@@ -443,7 +443,7 @@ docker exec -it ac-mysql mysql -u root -p
 ./scripts/bash/backup-export.sh storage/backups/ExportBackup_manual_$(date +%Y%m%d_%H%M%S)
 
 # Import data from a directory that contains the SQL dumps
-./scripts/bash/backup-import.sh --backup-dir storage/backups/ImportBackup --password azerothcore123
+./scripts/bash/backup-import.sh --backup-dir storage/backups/ExportBackup_20241029_120000 --password azerothcore123
 
 # View available backups
 ls -la storage/backups/
@@ -127,13 +127,13 @@ ExportBackup_YYYYMMDD_HHMMSS/
 Restores user accounts and characters from backup while preserving world data.
 
 ```bash
-./scripts/bash/backup-import.sh --backup-dir storage/backups/ImportBackup --password azerothcore123
+./scripts/bash/backup-import.sh --backup-dir storage/backups/ExportBackup_20241029_120000 --password azerothcore123
 
 # Restore directly from an ExportBackup archive you just unpacked
 ./scripts/bash/backup-import.sh --backup-dir ExportBackup_20241029_120000 --password azerothcore123 --all
 ```
 
-> The importer always requires `--backup-dir`. A common workflow is to extract an `ExportBackup_*` archive into `storage/backups/ImportBackup/` and pass that directory to the script, but you can point to any folder that contains the SQL dumps.
+> The importer always requires `--backup-dir`. A common workflow is to extract an `ExportBackup_*` archive into `storage/backups/` (so automated jobs can see it) and pass that directory to the script, but you can point to any folder that contains the SQL dumps.
 
 **Required Files:**
 - `acore_auth.sql[.gz]` - User accounts (required)
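
For reference, the workflow the updated note describes looks like this end to end (the archive name is a placeholder):

```bash
# Unpack an export archive where the automated jobs can see it...
tar -xzf ExportBackup_20241029_120000.tar.gz -C storage/backups/
# ...then point the importer at the extracted directory.
./scripts/bash/backup-import.sh --backup-dir storage/backups/ExportBackup_20241029_120000 --password azerothcore123
```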
@@ -105,12 +105,12 @@ ExportBackup_YYYYMMDD_HHMMSS/        # Created by scripts/bash/backup-export.sh
 ├── acore_characters.sql.gz          # Character data
 └── manifest.json
 
-ImportBackup/                        # Optional staging area (use with --backup-dir)
-├── acore_auth.sql[.gz]              # Required: accounts
-├── acore_characters.sql[.gz]        # Required: characters
-└── acore_world.sql[.gz]             # Optional: world data
+ExportBackup_YYYYMMDD_HHMMSS/        # Optional manual drop-in under storage/backups/
+├── acore_auth.sql.gz
+├── acore_characters.sql.gz
+└── manifest.json
 
-Place extracted dumps from any `ExportBackup_*` archive into this directory (or any other directory you prefer) and pass it to `scripts/bash/backup-import.sh --backup-dir <path>` when performing a manual restore.
+Place extracted dumps from any `ExportBackup_*` archive inside `storage/backups/` (for automatic detection) or pass the directory directly to `scripts/bash/backup-import.sh --backup-dir <path>` when performing a manual restore.
 ```
 
 ---
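
Given the required files in this layout, a quick pre-flight check before a manual restore might look like the following sketch (`acore_world` is optional and deliberately skipped; the directory name is a placeholder):

```bash
dir="storage/backups/ExportBackup_20241029_120000"
# Each required database must be present as .sql or .sql.gz.
for db in acore_auth acore_characters; do
  if [ ! -e "$dir/$db.sql" ] && [ ! -e "$dir/$db.sql.gz" ]; then
    echo "❌ Missing $db dump in $dir" >&2
    exit 1
  fi
done
echo "✅ Required dumps present"
```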
@@ -96,7 +96,6 @@ BACKUP_SEARCH_PATHS=(
     "/var/lib/mysql-persistent"
     "$PROJECT_ROOT/storage/backups"
     "$PROJECT_ROOT/manual-backups"
-    "$PROJECT_ROOT/storage/backups/ImportBackup"
 )
 
 backup_path=""
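
This array feeds the discovery logic that follows in the script; a simplified sketch of that pattern (the real loop adds the per-file validation shown in the next hunks):

```bash
backup_path=""
for candidate in "${BACKUP_SEARCH_PATHS[@]}"; do
  # Take the first search path that exists and is non-empty.
  if [ -d "$candidate" ] && [ -n "$(ls -A "$candidate" 2>/dev/null)" ]; then
    echo "🔍 Searching $candidate for restorable backups"
    backup_path="$candidate"   # the real script validates contents before accepting
    break
  fi
done
```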
@@ -149,7 +148,7 @@ if [ -z "$backup_path" ]; then
         echo "📦 Latest hourly backup found: $latest_hourly"
         for backup_file in "$BACKUP_DIRS/hourly/$latest_hourly"/*.sql.gz; do
             if [ -f "$backup_file" ] && [ -s "$backup_file" ]; then
-                if timeout 10 zcat "$backup_file" 2>/dev/null | head -20 | grep -q "CREATE DATABASE\|INSERT INTO\|CREATE TABLE"; then
+                if timeout 10 zcat "$backup_file" >/dev/null 2>&1; then
                     echo "✅ Valid hourly backup file: $(basename "$backup_file")"
                     backup_path="$BACKUP_DIRS/hourly/$latest_hourly"
                     break 2
@@ -171,7 +170,7 @@ if [ -z "$backup_path" ]; then
         echo "🔍 Validating timestamped backup content..."
         for backup_file in "$BACKUP_DIRS/$latest_timestamped"/*.sql.gz; do
             if [ -f "$backup_file" ] && [ -s "$backup_file" ]; then
-                if timeout 10 zcat "$backup_file" 2>/dev/null | head -20 | grep -q "CREATE DATABASE\|INSERT INTO\|CREATE TABLE"; then
+                if timeout 10 zcat "$backup_file" >/dev/null 2>&1; then
                     echo "✅ Valid timestamped backup found: $(basename "$backup_file")"
                     backup_path="$BACKUP_DIRS/$latest_timestamped"
                     break 2
@@ -189,7 +188,7 @@ if [ -z "$backup_path" ]; then
     latest_manual=$(ls -1t "$BACKUP_DIRS"/*.sql 2>/dev/null | head -n 1)
     if [ -n "$latest_manual" ] && [ -f "$latest_manual" ]; then
         echo "📦 Found manual backup: $(basename "$latest_manual")"
-        if timeout 10 head -20 "$latest_manual" 2>/dev/null | grep -q "CREATE DATABASE\|INSERT INTO\|CREATE TABLE"; then
+        if timeout 10 head -20 "$latest_manual" >/dev/null 2>&1; then
             echo "✅ Valid manual backup file: $(basename "$latest_manual")"
             backup_path="$latest_manual"
             break
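
The three hunks above replace a content sniff with a pure integrity check: the old test grepped the first lines of each dump for SQL keywords, while the new test streams the whole file through `zcat` and relies on the gzip CRC to catch truncated or corrupt archives. Side by side:

```bash
f="storage/backups/daily/acore_auth.sql.gz"   # example path

# Before: accept the file only if its head looks like SQL.
timeout 10 zcat "$f" 2>/dev/null | head -20 \
  | grep -q "CREATE DATABASE\|INSERT INTO\|CREATE TABLE" && echo "looks like SQL"

# After: accept the file if the whole stream decompresses cleanly.
timeout 10 zcat "$f" >/dev/null 2>&1 && echo "gzip stream intact"
```

Note that for plain `.sql` manual backups the new `head -20 ... >/dev/null` test only confirms the file is readable; there is no compression stream to verify.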
@@ -316,6 +315,15 @@ Updates.EnableDatabases = 7
 Updates.AutoSetup = 1
 TempDir = "${TEMP_DIR}"
 MySQLExecutable = "${MYSQL_EXECUTABLE}"
+Updates.AllowedModules = "all"
+LoginDatabase.WorkerThreads = 1
+LoginDatabase.SynchThreads = 1
+WorldDatabase.WorkerThreads = 1
+WorldDatabase.SynchThreads = 1
+CharacterDatabase.WorkerThreads = 1
+CharacterDatabase.SynchThreads = 1
+SourceDirectory = "/azerothcore"
+Updates.ExceptionShutdownDelay = 10000
 EOF
 
 echo "🚀 Running database import..."
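
The heredoc above writes the configuration consumed by AzerothCore's `dbimport` tool; pinning every database to one worker/synch thread keeps the one-shot import simple and deterministic. A hedged example of the invocation that typically follows (binary path, config path, and the `-c` flag are assumed from standard AzerothCore layouts, not shown in this hunk):

```bash
# Assumed invocation; adjust paths to your image layout.
CONFIG_FILE="/azerothcore/env/dist/etc/dbimport.conf"   # hypothetical location
/azerothcore/env/dist/bin/dbimport -c "$CONFIG_FILE"
```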
@@ -12,8 +12,8 @@ fi
 IMPORT_DIR="./database-import"
 STORAGE_PATH="${STORAGE_PATH:-./storage}"
 STORAGE_PATH_LOCAL="${STORAGE_PATH_LOCAL:-./local-storage}"
-BACKUP_DIR="${STORAGE_PATH}/backups/daily"
-FULL_BACKUP_DIR="${STORAGE_PATH}/backups/ImportBackup"
+BACKUP_ROOT="${STORAGE_PATH}/backups"
+BACKUP_DIR="${BACKUP_ROOT}/daily"
 TIMESTAMP=$(date +%Y-%m-%d)
 
 shopt -s nullglob
@@ -47,7 +47,7 @@ echo "📥 Found database files in $IMPORT_DIR"
 echo "📂 Copying to backup system for import..."
 
 # Ensure backup directory exists
-mkdir -p "$BACKUP_DIR" "$FULL_BACKUP_DIR"
+mkdir -p "$BACKUP_DIR" "$BACKUP_ROOT"
 
 generate_unique_path(){
     local target="$1"
@@ -98,9 +98,12 @@ done
 
 stage_backup_directory(){
     local src_dir="$1"
+    if [ -z "$src_dir" ] || [ ! -d "$src_dir" ]; then
+        return
+    fi
     local dirname
     dirname="$(basename "$src_dir")"
-    local dest="$FULL_BACKUP_DIR/$dirname"
+    local dest="$BACKUP_ROOT/$dirname"
     dest="$(generate_unique_path "$dest")"
     echo "📦 Staging full backup directory $(basename "$src_dir") → $(basename "$dest")"
     cp -a "$src_dir" "$dest"
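
`generate_unique_path` is called here but its body falls outside this diff; a plausible implementation, shown only so the staging logic reads self-contained (an assumption, not the script's actual code):

```bash
generate_unique_path(){
    local target="$1"
    local candidate="$target"
    local n=1
    # Suffix _1, _2, ... until the path is free, so repeated imports never clobber.
    while [ -e "$candidate" ]; do
        candidate="${target}_${n}"
        n=$((n + 1))
    done
    echo "$candidate"
}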
@@ -152,13 +155,13 @@ extract_archive(){
     if [ ${#entries[@]} -eq 1 ] && [ -d "${entries[0]}" ]; then
         local inner_name
         inner_name="$(basename "${entries[0]}")"
-        dest="$FULL_BACKUP_DIR/$inner_name"
+        dest="$BACKUP_ROOT/$inner_name"
         dest="$(generate_unique_path "$dest")"
         mv "${entries[0]}" "$dest"
     else
         local base="${base_name%.*}"
         base="${base%.*}" # handle double extensions like .tar.gz
-        dest="$(generate_unique_path "$FULL_BACKUP_DIR/$base")"
+        dest="$(generate_unique_path "$BACKUP_ROOT/$base")"
         mkdir -p "$dest"
         if [ ${#entries[@]} -gt 0 ]; then
             mv "${entries[@]}" "$dest"/
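
The paired `%.*` expansions above strip up to two extensions so archive names collapse to a clean directory name; for example:

```bash
base_name="ExportBackup_20241029_120000.tar.gz"
base="${base_name%.*}"   # -> ExportBackup_20241029_120000.tar
base="${base%.*}"        # -> ExportBackup_20241029_120000  (handles .tar.gz)
echo "$base"
```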
@@ -183,12 +186,12 @@ fi
 if [ "$staged_dirs" -gt 0 ]; then
     dir_label="directories"
     [ "$staged_dirs" -eq 1 ] && dir_label="directory"
-    echo "✅ $staged_dirs full backup $dir_label staged in $FULL_BACKUP_DIR"
+    echo "✅ $staged_dirs full backup $dir_label staged in $BACKUP_ROOT"
 fi
 if [ "$staged_archives" -gt 0 ]; then
     archive_label="archives"
     [ "$staged_archives" -eq 1 ] && archive_label="archive"
-    echo "✅ $staged_archives backup $archive_label extracted to $FULL_BACKUP_DIR"
+    echo "✅ $staged_archives backup $archive_label extracted to $BACKUP_ROOT"
 fi
 
 if [ "$copied_sql" -eq 0 ] && [ "$staged_dirs" -eq 0 ] && [ "$staged_archives" -eq 0 ]; then