From aaa01e705737e28be21c283a8fde9e9378c06648 Mon Sep 17 00:00:00 2001 From: uprightbass360 Date: Wed, 29 Oct 2025 22:50:04 -0400 Subject: [PATCH] fix: backup script refactor --- backup-export.sh | 321 ++++++++++++++--- backup-import.sh | 567 +++++++++++++++++++++++++------ scripts/db-import-conditional.sh | 240 +++++++++---- 3 files changed, 893 insertions(+), 235 deletions(-) diff --git a/backup-export.sh b/backup-export.sh index 53c1f55..75f8adf 100755 --- a/backup-export.sh +++ b/backup-export.sh @@ -1,85 +1,298 @@ #!/bin/bash -# Export auth and character databases to ExportBackup_/ +# Export one or more ACore databases to ExportBackup_/ set -euo pipefail -# Get the directory where this script is located +INVOCATION_DIR="$PWD" SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -# Change to the script directory to ensure relative paths work correctly cd "$SCRIPT_DIR" +SUPPORTED_DBS=(auth characters world) +declare -A SUPPORTED_SET=() +for db in "${SUPPORTED_DBS[@]}"; do + SUPPORTED_SET["$db"]=1 +done + +declare -A DB_NAMES=([auth]="" [characters]="" [world]="") +declare -a INCLUDE_DBS=() +declare -a SKIP_DBS=() + +MYSQL_PW="" +DEST_PARENT="" +DEST_PROVIDED=false +EXPLICIT_SELECTION=false + usage(){ - cat < + cat <<'EOF' +Usage: ./backup-export.sh [options] [legacy positional args] -Creates a timestamped backup of the auth and character databases. +Creates a timestamped backup of one or more ACore databases. -Arguments: - [output_dir] Output directory (default: .) 
- MySQL root password (required) - Auth database name (required) - Characters database name (required) +Options: + -o, --output DIR Destination directory (default: script directory) + -p, --password PASS MySQL root password + --auth-db NAME Auth database schema name + --characters-db NAME Characters database schema name + --world-db NAME World database schema name + --db LIST Comma-separated list of databases to export + --skip LIST Comma-separated list of databases to skip + -h, --help Show this help and exit -Outputs: - ExportBackup_YYYYMMDD_HHMMSS/ - acore_auth.sql.gz - acore_characters.sql.gz - manifest.json +Supported database identifiers: auth, characters, world. -Services stay online; backup uses mysqldump. +Legacy positional forms are still supported: + ./backup-export.sh + ./backup-export.sh + ./backup-export.sh [world_db] + +In legacy mode all provided databases are exported. EOF } -case "${1:-}" in - -h|--help) usage; exit 0;; -esac +err(){ printf 'Error: %s\n' "$*" >&2; } +die(){ err "$1"; exit 1; } -# Check if required parameters are provided (minimum 3: password, auth_db, char_db) -if [[ $# -lt 3 ]]; then - echo "Error: Required parameters missing. 
Usage: ./backup-export.sh [output_dir] " >&2 - exit 1 +normalize_token(){ + printf '%s' "$1" | tr '[:upper:]' '[:lower:]' | tr -d '[:space:]' +} + +add_unique(){ + local -n arr="$1" + local value="$2" + for existing in "${arr[@]:-}"; do + [[ "$existing" == "$value" ]] && return + done + arr+=("$value") +} + +parse_db_list(){ + local -n target="$1" + local value="$2" + IFS=',' read -ra parts <<<"$value" + for part in "${parts[@]}"; do + local token + token="$(normalize_token "$part")" + [[ -z "$token" ]] && continue + if [[ -z "${SUPPORTED_SET[$token]:-}" ]]; then + die "Unknown database identifier: $token (supported: ${SUPPORTED_DBS[*]})" + fi + add_unique target "$token" + done +} + +remove_from_list(){ + local -n arr="$1" + local value="$2" + local -a filtered=() + for item in "${arr[@]}"; do + [[ "$item" == "$value" ]] || filtered+=("$item") + done + arr=("${filtered[@]}") +} + +resolve_relative(){ + local base="$1" path="$2" + if command -v python3 >/dev/null 2>&1; then + python3 - "$base" "$path" <<'PY' +import os, sys +base, path = sys.argv[1:3] +if not path: + print(os.path.abspath(base)) +elif os.path.isabs(path): + print(os.path.normpath(path)) +else: + print(os.path.normpath(os.path.join(base, path))) +PY + else + die "python3 is required but was not found on PATH" + fi +} + +json_string(){ + if ! 
command -v python3 >/dev/null 2>&1; then + die "python3 is required but was not found on PATH" + fi + python3 - "$1" <<'PY' +import json, sys +print(json.dumps(sys.argv[1])) +PY +} + +POSITIONAL=() +while [[ $# -gt 0 ]]; do + case "$1" in + -o|--output) + [[ $# -ge 2 ]] || die "--output requires a directory argument" + DEST_PARENT="$2" + DEST_PROVIDED=true + shift 2 + ;; + -p|--password) + [[ $# -ge 2 ]] || die "--password requires a value" + MYSQL_PW="$2" + shift 2 + ;; + --auth-db) + [[ $# -ge 2 ]] || die "--auth-db requires a value" + DB_NAMES[auth]="$2" + shift 2 + ;; + --characters-db) + [[ $# -ge 2 ]] || die "--characters-db requires a value" + DB_NAMES[characters]="$2" + shift 2 + ;; + --world-db) + [[ $# -ge 2 ]] || die "--world-db requires a value" + DB_NAMES[world]="$2" + shift 2 + ;; + --db|--only) + [[ $# -ge 2 ]] || die "--db requires a value" + EXPLICIT_SELECTION=true + parse_db_list INCLUDE_DBS "$2" + shift 2 + ;; + --skip) + [[ $# -ge 2 ]] || die "--skip requires a value" + parse_db_list SKIP_DBS "$2" + shift 2 + ;; + -h|--help) + usage + exit 0 + ;; + --) + shift + while [[ $# -gt 0 ]]; do + POSITIONAL+=("$1") + shift + done + break + ;; + -*) + die "Unknown option: $1" + ;; + *) + POSITIONAL+=("$1") + shift + ;; + esac +done + +if ((${#POSITIONAL[@]} > 0)); then + case ${#POSITIONAL[@]} in + 3) + MYSQL_PW="${POSITIONAL[0]}" + DB_NAMES[auth]="${POSITIONAL[1]}" + DB_NAMES[characters]="${POSITIONAL[2]}" + INCLUDE_DBS=(auth characters) + EXPLICIT_SELECTION=true + ;; + 4) + DEST_PARENT="${POSITIONAL[0]}" + DEST_PROVIDED=true + MYSQL_PW="${POSITIONAL[1]}" + DB_NAMES[auth]="${POSITIONAL[2]}" + DB_NAMES[characters]="${POSITIONAL[3]}" + INCLUDE_DBS=(auth characters) + EXPLICIT_SELECTION=true + ;; + 5) + DEST_PARENT="${POSITIONAL[0]}" + DEST_PROVIDED=true + MYSQL_PW="${POSITIONAL[1]}" + DB_NAMES[auth]="${POSITIONAL[2]}" + DB_NAMES[characters]="${POSITIONAL[3]}" + DB_NAMES[world]="${POSITIONAL[4]}" + INCLUDE_DBS=(auth characters world) + 
EXPLICIT_SELECTION=true + ;; + *) + die "Unrecognized positional arguments. Run --help for usage." + ;; + esac fi -# Handle both cases: with and without output_dir parameter -if [[ $# -eq 3 ]]; then - # No output_dir provided, use current script directory - DEST_PARENT="." - MYSQL_PW="$1" - DB_AUTH="$2" - DB_CHAR="$3" -elif [[ $# -ge 4 ]]; then - # output_dir provided - DEST_PARENT="$1" - MYSQL_PW="$2" - DB_AUTH="$3" - DB_CHAR="$4" +declare -a ACTIVE_DBS=() +if $EXPLICIT_SELECTION; then + ACTIVE_DBS=("${INCLUDE_DBS[@]}") +else + for db in "${SUPPORTED_DBS[@]}"; do + if [[ -n "${DB_NAMES[$db]}" ]]; then + add_unique ACTIVE_DBS "$db" + fi + done + if ((${#ACTIVE_DBS[@]} == 0)); then + ACTIVE_DBS=(auth characters) + fi fi -# Convert output directory to absolute path if it's relative -if [[ ! "$DEST_PARENT" = /* ]]; then - DEST_PARENT="$SCRIPT_DIR/$DEST_PARENT" +for skip in "${SKIP_DBS[@]:-}"; do + remove_from_list ACTIVE_DBS "$skip" +done + +if ((${#ACTIVE_DBS[@]} == 0)); then + die "No databases selected for export." +fi + +[[ -n "$MYSQL_PW" ]] || die "MySQL password is required (use --password)." + +for db in "${ACTIVE_DBS[@]}"; do + case "$db" in + auth|characters|world) ;; + *) die "Unsupported database identifier requested: $db" ;; + esac + if [[ -z "${DB_NAMES[$db]}" ]]; then + die "Missing schema name for '$db'. Provide --${db}-db." 
+ fi +done + +if $DEST_PROVIDED; then + DEST_PARENT="$(resolve_relative "$INVOCATION_DIR" "$DEST_PARENT")" +else + # Use storage/backups as default to align with existing backup structure + if [ -d "$SCRIPT_DIR/storage" ]; then + DEST_PARENT="$SCRIPT_DIR/storage/backups" + mkdir -p "$DEST_PARENT" + else + DEST_PARENT="$SCRIPT_DIR" + fi fi TIMESTAMP="$(date +%Y%m%d_%H%M%S)" -DEST_DIR="${DEST_PARENT%/}/ExportBackup_${TIMESTAMP}" +DEST_DIR="$(printf '%s/ExportBackup_%s' "$DEST_PARENT" "$TIMESTAMP")" mkdir -p "$DEST_DIR" +generated_at="$(date --iso-8601=seconds)" dump_db(){ - local db="$1" outfile="$2" - echo "Dumping $db -> $outfile" - docker exec ac-mysql mysqldump -uroot -p"$MYSQL_PW" "$db" | gzip > "$outfile" + local schema="$1" outfile="$2" + echo "Dumping ${schema} -> ${outfile}" + docker exec ac-mysql mysqldump -uroot -p"$MYSQL_PW" "$schema" | gzip > "$outfile" } -dump_db "$DB_AUTH" "$DEST_DIR/acore_auth.sql.gz" -dump_db "$DB_CHAR" "$DEST_DIR/acore_characters.sql.gz" +for db in "${ACTIVE_DBS[@]}"; do + outfile="$DEST_DIR/acore_${db}.sql.gz" + dump_db "${DB_NAMES[$db]}" "$outfile" +done -cat > "$DEST_DIR/manifest.json" < "$DEST_DIR/manifest.json" +echo "Exported databases: ${ACTIVE_DBS[*]}" echo "Backups saved under $DEST_DIR" diff --git a/backup-import.sh b/backup-import.sh index d957f9e..45ae198 100755 --- a/backup-import.sh +++ b/backup-import.sh @@ -1,10 +1,9 @@ #!/bin/bash -# Restore auth and character databases from ImportBackup/ and verify service health. +# Restore one or more ACore databases from a backup directory. 
set -euo pipefail -# Get the directory where this script is located +INVOCATION_DIR="$PWD" SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -# Change to the script directory to ensure relative paths work correctly cd "$SCRIPT_DIR" COLOR_RED='\033[0;31m' @@ -15,140 +14,486 @@ COLOR_RESET='\033[0m' log(){ printf '%b\n' "${COLOR_GREEN}$*${COLOR_RESET}"; } warn(){ printf '%b\n' "${COLOR_YELLOW}$*${COLOR_RESET}"; } err(){ printf '%b\n' "${COLOR_RED}$*${COLOR_RESET}"; } +fatal(){ err "$*"; exit 1; } + +SUPPORTED_DBS=(auth characters world) +declare -A SUPPORTED_SET=() +for db in "${SUPPORTED_DBS[@]}"; do + SUPPORTED_SET["$db"]=1 +done + +declare -A DB_NAMES=([auth]="" [characters]="" [world]="") +declare -a INCLUDE_DBS=() +declare -a SKIP_DBS=() +declare -a ACTIVE_DBS=() + +MYSQL_PW="" +BACKUP_DIR="" +BACKUP_PROVIDED=false +EXPLICIT_SELECTION=false usage(){ - cat < + cat <<'EOF' +Usage: ./backup-import.sh [options] -Restores user accounts and characters from a backup folder. +Restores selected ACore databases from a backup directory. -Arguments: - [backup_dir] Backup directory (default: ImportBackup/) - MySQL root password (required) - Auth database name (required) - Characters database name (required) - World database name (required) +Options: + -b, --backup-dir DIR Backup directory (default: ImportBackup/) + -p, --password PASS MySQL root password + --auth-db NAME Auth database schema name + --characters-db NAME Characters database schema name + --world-db NAME World database schema name + --db LIST Comma-separated list of databases to import; may be repeated + --skip LIST Comma-separated list of databases to skip + --all Import all supported databases + -h, --help Show this help and exit -Required files: - acore_auth.sql or acore_auth.sql.gz - acore_characters.sql or acore_characters.sql.gz -Optional file (will prompt): - acore_world.sql or acore_world.sql.gz +Supported database identifiers: auth, characters, world. 
+By default the script restores auth and characters. Use --db/--skip to +customize the selection. -Steps performed: - 1. Stop world/auth services - 2. Back up current auth/character DBs to manual-backups/ - 3. Import provided dumps - 4. Re-run module SQL to restore customizations - 5. Restart services to reinitialize GUID generators - 6. Show status summary +Legacy positional forms are still supported: + ./backup-import.sh + ./backup-import.sh -Note: Service restart is required to ensure character GUID generators -are properly updated after importing characters. +Legacy usage always imports the databases provided in the invocation. EOF } +normalize_token(){ + printf '%s' "$1" | tr '[:upper:]' '[:lower:]' | tr -d '[:space:]' +} + +add_unique(){ + local -n arr="$1" + local value="$2" + for existing in "${arr[@]:-}"; do + [[ "$existing" == "$value" ]] && return + done + arr+=("$value") +} + +parse_db_list(){ + local -n target="$1" + local value="$2" + IFS=',' read -ra parts <<<"$value" + for part in "${parts[@]}"; do + local token + token="$(normalize_token "$part")" + [[ -z "$token" ]] && continue + if [[ -z "${SUPPORTED_SET[$token]:-}" ]]; then + fatal "Unknown database identifier: $token (supported: ${SUPPORTED_DBS[*]})" + fi + add_unique target "$token" + done +} + +remove_from_list(){ + local -n arr="$1" + local value="$2" + local -a filtered=() + for item in "${arr[@]}"; do + [[ "$item" == "$value" ]] || filtered+=("$item") + done + arr=("${filtered[@]}") +} + +resolve_relative(){ + local base="$1" path="$2" + if command -v python3 >/dev/null 2>&1; then + python3 - "$base" "$path" <<'PY' +import os, sys +base, path = sys.argv[1:3] +if not path: + print(os.path.abspath(base)) +elif os.path.isabs(path): + print(os.path.normpath(path)) +else: + print(os.path.normpath(os.path.join(base, path))) +PY + else + fatal "python3 is required but was not found on PATH" + fi +} + +load_manifest(){ + local path="$1" + [[ -f "$path" ]] || return 0 + if ! 
command -v python3 >/dev/null 2>&1; then + fatal "python3 is required to read $path" + fi + while IFS='=' read -r key value; do + [[ -n "$key" && -n "$value" ]] || continue + local token + token="$(normalize_token "$key")" + [[ -n "${SUPPORTED_SET[$token]:-}" ]] || continue + if [[ -z "${DB_NAMES[$token]}" ]]; then + DB_NAMES[$token]="$value" + fi + done < <(python3 - "$path" <<'PY' +import json, sys + +SUPPORTED = { + "auth": {"keys": {"auth"}, "schemas": {"acore_auth"}}, + "characters": {"keys": {"characters", "chars", "char"}, "schemas": {"acore_characters"}}, + "world": {"keys": {"world"}, "schemas": {"acore_world"}}, +} + +def map_entry(key, value, result): + if key and key in SUPPORTED: + result[key] = value + return + value_lower = value.lower() + for ident, meta in SUPPORTED.items(): + if value_lower in meta["schemas"]: + result.setdefault(ident, value) + return + if key: + for ident, meta in SUPPORTED.items(): + if key in meta["keys"]: + result.setdefault(ident, value) + return + +def main(): + path = sys.argv[1] + with open(path, "r", encoding="utf-8") as fh: + data = json.load(fh) + result = {} + databases = data.get("databases") + if isinstance(databases, dict): + for key, value in databases.items(): + map_entry(key.lower(), str(value), result) + elif isinstance(databases, list): + for value in databases: + map_entry("", str(value), result) + for key, value in result.items(): + print(f"{key}={value}") + +if __name__ == "__main__": + main() +PY +) +} + +find_dump(){ + local db="$1" + local hint="${DB_NAMES[$db]}" + if ! 
command -v python3 >/dev/null 2>&1; then + fatal "python3 is required to locate backup dumps" + fi + python3 - "$BACKUP_DIR" "$db" "$hint" <<'PY' +import glob, os, sys +backup_dir, db, hint = sys.argv[1:4] + +# Search patterns for database dumps +patterns = [ + f"acore_{db}.sql.gz", + f"acore_{db}.sql", + f"{db}.sql.gz", + f"{db}.sql", +] +if hint: + patterns = [f"{hint}.sql.gz", f"{hint}.sql"] + patterns + +# Search locations (in order of preference) +search_dirs = [] + +# Check for daily backups first (most recent) +daily_dir = os.path.join(backup_dir, "daily") +if os.path.isdir(daily_dir): + daily_subdirs = [d for d in os.listdir(daily_dir) if os.path.isdir(os.path.join(daily_dir, d))] + if daily_subdirs: + latest_daily = max(daily_subdirs, key=lambda x: os.path.getmtime(os.path.join(daily_dir, x))) + search_dirs.append(os.path.join(daily_dir, latest_daily)) + +# Check for hourly backups +hourly_dir = os.path.join(backup_dir, "hourly") +if os.path.isdir(hourly_dir): + hourly_subdirs = [d for d in os.listdir(hourly_dir) if os.path.isdir(os.path.join(hourly_dir, d))] + if hourly_subdirs: + latest_hourly = max(hourly_subdirs, key=lambda x: os.path.getmtime(os.path.join(hourly_dir, x))) + search_dirs.append(os.path.join(hourly_dir, latest_hourly)) + +# Check for timestamped backup directories +timestamped_dirs = [] +try: + for item in os.listdir(backup_dir): + item_path = os.path.join(backup_dir, item) + if os.path.isdir(item_path): + # Match ExportBackup_YYYYMMDD_HHMMSS or just YYYYMMDD_HHMMSS + if item.startswith("ExportBackup_") or (len(item) == 15 and item[8] == '_'): + timestamped_dirs.append(item_path) +except OSError: + pass + +if timestamped_dirs: + latest_timestamped = max(timestamped_dirs, key=os.path.getmtime) + search_dirs.append(latest_timestamped) + +# Add the main backup directory itself +search_dirs.append(backup_dir) + +# Search for matching dumps +seen = {} +matches = [] + +for search_dir in search_dirs: + for pattern in patterns: + for path in 
glob.glob(os.path.join(search_dir, pattern)): + if path not in seen and os.path.isfile(path): + seen[path] = True + matches.append(path) + +if not matches: + sys.exit(1) + +# Return the most recent match +latest = max(matches, key=os.path.getmtime) +print(latest) +PY +} + +guess_schema_from_dump(){ + local dump="$1" + local base + base="$(basename "$dump")" + case "$base" in + acore_auth.sql|acore_auth.sql.gz) echo "acore_auth" ;; + acore_characters.sql|acore_characters.sql.gz) echo "acore_characters" ;; + acore_world.sql|acore_world.sql.gz) echo "acore_world" ;; + *) + if [[ "$base" =~ ^([A-Za-z0-9_-]+)\.sql(\.gz)?$ ]]; then + echo "${BASH_REMATCH[1]}" + fi + ;; + esac +} + +timestamp(){ date +%Y%m%d_%H%M%S; } + +backup_db(){ + local schema="$1" label="$2" + local out="manual-backups/${label}-pre-import-$(timestamp).sql" + mkdir -p manual-backups + log "Backing up current ${schema} to ${out}" + docker exec ac-mysql mysqldump -uroot -p"$MYSQL_PW" "$schema" > "$out" +} + +restore(){ + local schema="$1" dump="$2" + log "Importing ${dump##*/} into ${schema}" + case "$dump" in + *.gz) gzip -dc "$dump" ;; + *.sql) cat "$dump" ;; + *) fatal "Unsupported dump format: $dump" ;; + esac | docker exec -i ac-mysql mysql -uroot -p"$MYSQL_PW" "$schema" +} + +db_selected(){ + local needle="$1" + for item in "${ACTIVE_DBS[@]}"; do + [[ "$item" == "$needle" ]] && return 0 + done + return 1 +} + +count_rows(){ + docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -N -B -e "$1" +} + case "${1:-}" in -h|--help) usage; exit 0;; esac -# Check if required parameters are provided (minimum 4: password, auth_db, char_db, world_db) -if [[ $# -lt 4 ]]; then - err "Required parameters missing. 
Usage: ./backup-import.sh [backup_dir] " - exit 1 +POSITIONAL=() +while [[ $# -gt 0 ]]; do + case "$1" in + -b|--backup-dir) + [[ $# -ge 2 ]] || fatal "--backup-dir requires a directory argument" + BACKUP_DIR="$2" + BACKUP_PROVIDED=true + shift 2 + ;; + -p|--password) + [[ $# -ge 2 ]] || fatal "--password requires a value" + MYSQL_PW="$2" + shift 2 + ;; + --auth-db) + [[ $# -ge 2 ]] || fatal "--auth-db requires a value" + DB_NAMES[auth]="$2" + shift 2 + ;; + --characters-db) + [[ $# -ge 2 ]] || fatal "--characters-db requires a value" + DB_NAMES[characters]="$2" + shift 2 + ;; + --world-db) + [[ $# -ge 2 ]] || fatal "--world-db requires a value" + DB_NAMES[world]="$2" + shift 2 + ;; + --db|--only) + [[ $# -ge 2 ]] || fatal "--db requires a value" + EXPLICIT_SELECTION=true + parse_db_list INCLUDE_DBS "$2" + shift 2 + ;; + --skip) + [[ $# -ge 2 ]] || fatal "--skip requires a value" + parse_db_list SKIP_DBS "$2" + shift 2 + ;; + --all) + EXPLICIT_SELECTION=true + for db in "${SUPPORTED_DBS[@]}"; do + add_unique INCLUDE_DBS "$db" + done + shift + ;; + -h|--help) + usage + exit 0 + ;; + --) + shift + while [[ $# -gt 0 ]]; do + POSITIONAL+=("$1") + shift + done + break + ;; + -*) + fatal "Unknown option: $1" + ;; + *) + POSITIONAL+=("$1") + shift + ;; + esac +done + +if ((${#POSITIONAL[@]} > 0)); then + case ${#POSITIONAL[@]} in + 4) + MYSQL_PW="${POSITIONAL[0]}" + DB_NAMES[auth]="${POSITIONAL[1]}" + DB_NAMES[characters]="${POSITIONAL[2]}" + DB_NAMES[world]="${POSITIONAL[3]}" + INCLUDE_DBS=(auth characters world) + EXPLICIT_SELECTION=true + ;; + 5) + BACKUP_DIR="${POSITIONAL[0]}" + BACKUP_PROVIDED=true + MYSQL_PW="${POSITIONAL[1]}" + DB_NAMES[auth]="${POSITIONAL[2]}" + DB_NAMES[characters]="${POSITIONAL[3]}" + DB_NAMES[world]="${POSITIONAL[4]}" + INCLUDE_DBS=(auth characters world) + EXPLICIT_SELECTION=true + ;; + *) + fatal "Unrecognized positional arguments. Run --help for usage." 
+ ;; + esac fi -# Handle both cases: with and without backup_dir parameter -if [[ $# -eq 4 ]]; then - # No backup_dir provided, use default - BACKUP_DIR="ImportBackup" - MYSQL_PW="$1" - DB_AUTH="$2" - DB_CHAR="$3" - DB_WORLD="$4" -elif [[ $# -ge 5 ]]; then - # backup_dir provided - convert to absolute path if relative - BACKUP_DIR="$1" - MYSQL_PW="$2" - DB_AUTH="$3" - DB_CHAR="$4" - DB_WORLD="$5" +if $EXPLICIT_SELECTION; then + ACTIVE_DBS=("${INCLUDE_DBS[@]}") +else + ACTIVE_DBS=(auth characters) fi -# Convert backup directory to absolute path if it's relative -if [[ ! "$BACKUP_DIR" = /* ]]; then - BACKUP_DIR="$SCRIPT_DIR/$BACKUP_DIR" +for skip in "${SKIP_DBS[@]:-}"; do + remove_from_list ACTIVE_DBS "$skip" +done + +if ((${#ACTIVE_DBS[@]} == 0)); then + fatal "No databases selected for import." fi -require_file(){ - local file="$1" - [[ -f "$file" ]] || { err "Missing required backup file: $file"; exit 1; } -} +if $BACKUP_PROVIDED; then + BACKUP_DIR="$(resolve_relative "$INVOCATION_DIR" "$BACKUP_DIR")" +else + # Auto-discover backup directory from available options + BACKUP_SEARCH_PATHS=( + "$SCRIPT_DIR/ImportBackup" + "$SCRIPT_DIR/storage/backups" + "$SCRIPT_DIR/manual-backups" + "/backups" + "/var/lib/mysql-persistent" + ) -if [[ ! 
-d "$BACKUP_DIR" ]]; then - err "Backup directory not found: $BACKUP_DIR" - exit 1 + BACKUP_DIR="" + for candidate in "${BACKUP_SEARCH_PATHS[@]}"; do + if [ -d "$candidate" ] && [ -n "$(ls -A "$candidate" 2>/dev/null)" ]; then + # Check if this directory contains backups + if [ -d "$candidate/daily" ] || [ -d "$candidate/hourly" ] || \ + ls "$candidate"/*.sql.gz >/dev/null 2>&1 || ls "$candidate"/*.sql >/dev/null 2>&1 || \ + ls "$candidate"/ExportBackup_* >/dev/null 2>&1 || ls "$candidate"/20[0-9][0-9][0-9][0-9][0-9][0-9]_* >/dev/null 2>&1; then + log "Auto-discovered backup directory: $candidate" + BACKUP_DIR="$candidate" + break + fi + fi + done + + if [ -z "$BACKUP_DIR" ]; then + BACKUP_DIR="$SCRIPT_DIR/ImportBackup" + warn "No backup directory auto-discovered, using default: $BACKUP_DIR" + fi fi -AUTH_DUMP=$(find "$BACKUP_DIR" -maxdepth 1 -name 'acore_auth.sql*' | head -n1 || true) -CHAR_DUMP=$(find "$BACKUP_DIR" -maxdepth 1 -name 'acore_characters.sql*' | head -n1 || true) -WORLD_DUMP=$(find "$BACKUP_DIR" -maxdepth 1 -name 'acore_world.sql*' | head -n1 || true) +[[ -d "$BACKUP_DIR" ]] || fatal "Backup directory not found: $BACKUP_DIR" +log "Using backup directory: $BACKUP_DIR" -require_file "$AUTH_DUMP" -require_file "$CHAR_DUMP" +MANIFEST_PATH="$BACKUP_DIR/manifest.json" +if [[ -f "$MANIFEST_PATH" ]]; then + load_manifest "$MANIFEST_PATH" +fi -timestamp(){ date +%Y%m%d_%H%M%S; } +[[ -n "$MYSQL_PW" ]] || fatal "MySQL password is required (use --password)." 
-backup_db(){ - local db="$1" - local out="manual-backups/${db}-pre-import-$(timestamp).sql" - mkdir -p manual-backups - log "Backing up current $db to $out" - docker exec ac-mysql mysqldump -uroot -p"$MYSQL_PW" "$db" > "$out" -} - -restore(){ - local db="$1" - local dump="$2" - log "Importing $dump into $db" - case "$dump" in - *.gz) gzip -dc "$dump" ;; - *.sql) cat "$dump" ;; - *) err "Unsupported dump format: $dump"; exit 1;; - esac | docker exec -i ac-mysql mysql -uroot -p"$MYSQL_PW" "$db" -} +declare -A DUMP_PATHS=() +log "Databases selected: ${ACTIVE_DBS[*]}" +for db in "${ACTIVE_DBS[@]}"; do + if ! dump_path="$(find_dump "$db")"; then + fatal "No dump found for '$db' in $BACKUP_DIR (expected files like acore_${db}.sql or .sql.gz)." + fi + if [[ -z "${DB_NAMES[$db]}" ]]; then + DB_NAMES[$db]="$(guess_schema_from_dump "$dump_path")" + fi + [[ -n "${DB_NAMES[$db]}" ]] || fatal "Missing schema name for '$db'. Provide --${db}-db, include it in manifest.json, or name the dump appropriately." + DUMP_PATHS["$db"]="$dump_path" + log " $db -> ${DB_NAMES[$db]} (using ${dump_path##*/})" +done log "Stopping world/auth services" docker stop ac-worldserver ac-authserver >/dev/null || warn "Services already stopped" -backup_db "$DB_AUTH" -restore "$DB_AUTH" "$AUTH_DUMP" +for db in "${ACTIVE_DBS[@]}"; do + backup_db "${DB_NAMES[$db]}" "$db" + restore "${DB_NAMES[$db]}" "${DUMP_PATHS[$db]}" +done -backup_db "$DB_CHAR" -restore "$DB_CHAR" "$CHAR_DUMP" - -if [[ -n "$WORLD_DUMP" ]]; then - read -rp "World dump detected (${WORLD_DUMP##*/}). Restore it as well? 
[y/N]: " ANSWER - if [[ "$ANSWER" =~ ^[Yy]$ ]]; then - backup_db "$DB_WORLD" - restore "$DB_WORLD" "$WORLD_DUMP" - else - warn "Skipping world database restore" - fi +if db_selected characters || db_selected world; then + log "Reapplying module SQL patches" + docker compose --profile db --profile modules run --rm \ + --entrypoint /bin/sh ac-modules \ + -c 'apk add --no-cache bash curl >/dev/null && bash /tmp/scripts/manage-modules.sh >/tmp/mm.log && cat /tmp/mm.log' || warn "Module SQL run exited with non-zero status" +else + warn "Skipping module SQL reapply (auth-only import)" fi -log "Reapplying module SQL patches" -docker compose --profile db --profile modules run --rm \ - --entrypoint /bin/sh ac-modules \ - -c 'apk add --no-cache bash curl >/dev/null && bash /tmp/scripts/manage-modules.sh >/tmp/mm.log && cat /tmp/mm.log' || warn "Module SQL run exited with non-zero status" - log "Restarting services to reinitialize GUID generators" docker restart ac-authserver ac-worldserver >/dev/null log "Waiting for services to fully initialize..." 
sleep 10 -# Wait for services to be healthy for i in {1..30}; do if docker exec ac-worldserver pgrep worldserver >/dev/null 2>&1 && docker exec ac-authserver pgrep authserver >/dev/null 2>&1; then log "Services are running" @@ -160,21 +505,21 @@ for i in {1..30}; do sleep 2 done -count_rows(){ - docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -N -B -e "$1" -} +if db_selected auth; then + ACCOUNTS=$(count_rows "SELECT COUNT(*) FROM ${DB_NAMES[auth]}.account;") + log "Accounts: $ACCOUNTS" +fi -ACCOUNTS=$(count_rows "SELECT COUNT(*) FROM ${DB_AUTH}.account;") -CHARS=$(count_rows "SELECT COUNT(*) FROM ${DB_CHAR}.characters;") -MAX_GUID=$(count_rows "SELECT COALESCE(MAX(guid), 0) FROM ${DB_CHAR}.characters;") - -log "Accounts: $ACCOUNTS" -log "Characters: $CHARS" -if [ "$CHARS" -gt 0 ]; then - log "Highest character GUID: $MAX_GUID" - log "Next new character will receive GUID: $((MAX_GUID + 1))" +if db_selected characters; then + CHARS=$(count_rows "SELECT COUNT(*) FROM ${DB_NAMES[characters]}.characters;") + log "Characters: $CHARS" + if [ "$CHARS" -gt 0 ]; then + MAX_GUID=$(count_rows "SELECT COALESCE(MAX(guid), 0) FROM ${DB_NAMES[characters]}.characters;") + log "Highest character GUID: $MAX_GUID" + log "Next new character will receive GUID: $((MAX_GUID + 1))" + fi fi ./status.sh --once || warn "status.sh reported issues; inspect manually." -log "Import completed." +log "Import completed for: ${ACTIVE_DBS[*]}" diff --git a/scripts/db-import-conditional.sh b/scripts/db-import-conditional.sh index ebb8f30..dab52df 100755 --- a/scripts/db-import-conditional.sh +++ b/scripts/db-import-conditional.sh @@ -87,7 +87,15 @@ echo "🔧 Starting database import process..." echo "🔍 Checking for backups to restore..." 
-BACKUP_DIRS="/backups" +# Define backup search paths in priority order +BACKUP_SEARCH_PATHS=( + "/backups" + "/var/lib/mysql-persistent" + "$SCRIPT_DIR/../storage/backups" + "$SCRIPT_DIR/../manual-backups" + "$SCRIPT_DIR/../ImportBackup" +) + backup_path="" echo "🔍 Checking for legacy backup file..." @@ -103,89 +111,180 @@ else echo "🔍 No legacy backup found" fi -if [ -z "$backup_path" ] && [ -d "$BACKUP_DIRS" ]; then - echo "📁 Backup directory exists, checking for timestamped backups..." - if [ -n "$(ls -A "$BACKUP_DIRS" 2>/dev/null)" ]; then - if [ -d "$BACKUP_DIRS/daily" ]; then - echo "🔍 Checking for daily backups..." - latest_daily=$(ls -1t "$BACKUP_DIRS/daily" 2>/dev/null | head -n 1) - if [ -n "$latest_daily" ] && [ -d "$BACKUP_DIRS/daily/$latest_daily" ]; then - echo "đŸ“Ļ Latest daily backup found: $latest_daily" - for backup_file in "$BACKUP_DIRS/daily/$latest_daily"/*.sql.gz; do - if [ -f "$backup_file" ] && [ -s "$backup_file" ]; then - if timeout 10 zcat "$backup_file" 2>/dev/null | head -20 | grep -q "CREATE DATABASE\|INSERT INTO\|CREATE TABLE"; then - echo "✅ Valid daily backup file: $(basename "$backup_file")" - backup_path="$BACKUP_DIRS/daily/$latest_daily" - break +# Search through backup directories +if [ -z "$backup_path" ]; then + for BACKUP_DIRS in "${BACKUP_SEARCH_PATHS[@]}"; do + if [ ! -d "$BACKUP_DIRS" ]; then + continue + fi + + echo "📁 Checking backup directory: $BACKUP_DIRS" + if [ -n "$(ls -A "$BACKUP_DIRS" 2>/dev/null)" ]; then + # Check for daily backups first + if [ -d "$BACKUP_DIRS/daily" ]; then + echo "🔍 Checking for daily backups..." 
+ latest_daily=$(ls -1t "$BACKUP_DIRS/daily" 2>/dev/null | head -n 1) + if [ -n "$latest_daily" ] && [ -d "$BACKUP_DIRS/daily/$latest_daily" ]; then + echo "đŸ“Ļ Latest daily backup found: $latest_daily" + for backup_file in "$BACKUP_DIRS/daily/$latest_daily"/*.sql.gz; do + if [ -f "$backup_file" ] && [ -s "$backup_file" ]; then + if timeout 10 zcat "$backup_file" 2>/dev/null | head -20 | grep -q "CREATE DATABASE\|INSERT INTO\|CREATE TABLE"; then + echo "✅ Valid daily backup file: $(basename "$backup_file")" + backup_path="$BACKUP_DIRS/daily/$latest_daily" + break 2 + fi + fi + done + fi + fi + + # Check for hourly backups + if [ -z "$backup_path" ] && [ -d "$BACKUP_DIRS/hourly" ]; then + echo "🔍 Checking for hourly backups..." + latest_hourly=$(ls -1t "$BACKUP_DIRS/hourly" 2>/dev/null | head -n 1) + if [ -n "$latest_hourly" ] && [ -d "$BACKUP_DIRS/hourly/$latest_hourly" ]; then + echo "đŸ“Ļ Latest hourly backup found: $latest_hourly" + for backup_file in "$BACKUP_DIRS/hourly/$latest_hourly"/*.sql.gz; do + if [ -f "$backup_file" ] && [ -s "$backup_file" ]; then + if timeout 10 zcat "$backup_file" 2>/dev/null | head -20 | grep -q "CREATE DATABASE\|INSERT INTO\|CREATE TABLE"; then + echo "✅ Valid hourly backup file: $(basename "$backup_file")" + backup_path="$BACKUP_DIRS/hourly/$latest_hourly" + break 2 + fi + fi + done + fi + fi + + # Check for timestamped backup directories (like ExportBackup_YYYYMMDD_HHMMSS) + if [ -z "$backup_path" ]; then + echo "🔍 Checking for timestamped backup directories..." + timestamped_backups=$(ls -1t "$BACKUP_DIRS" 2>/dev/null | grep -E '^(ExportBackup_)?[0-9]{8}_[0-9]{6}$' | head -n 1) + if [ -n "$timestamped_backups" ]; then + latest_timestamped="$timestamped_backups" + echo "đŸ“Ļ Found timestamped backup: $latest_timestamped" + if [ -d "$BACKUP_DIRS/$latest_timestamped" ]; then + if ls "$BACKUP_DIRS/$latest_timestamped"/*.sql.gz >/dev/null 2>&1; then + echo "🔍 Validating timestamped backup content..." 
+ for backup_file in "$BACKUP_DIRS/$latest_timestamped"/*.sql.gz; do + if [ -f "$backup_file" ] && [ -s "$backup_file" ]; then + if timeout 10 zcat "$backup_file" 2>/dev/null | head -20 | grep -q "CREATE DATABASE\|INSERT INTO\|CREATE TABLE"; then + echo "✅ Valid timestamped backup found: $(basename "$backup_file")" + backup_path="$BACKUP_DIRS/$latest_timestamped" + break 2 + fi + fi + done fi fi - done - else - echo "📅 No daily backup directory found" + fi fi - else - echo "📅 No daily backup directory found" - echo "🔍 Checking for timestamped backup directories..." - timestamped_backups=$(ls -1t $BACKUP_DIRS 2>/dev/null | grep -E '^[0-9]{8}_[0-9]{6}$' | head -n 1) - if [ -n "$timestamped_backups" ]; then - latest_timestamped="$timestamped_backups" - echo "đŸ“Ļ Found timestamped backup: $latest_timestamped" - if [ -d "$BACKUP_DIRS/$latest_timestamped" ]; then - if ls "$BACKUP_DIRS/$latest_timestamped"/*.sql.gz >/dev/null 2>&1; then - echo "🔍 Validating timestamped backup content..." - for backup_file in "$BACKUP_DIRS/$latest_timestamped"/*.sql.gz; do - if [ -f "$backup_file" ] && [ -s "$backup_file" ]; then - if timeout 10 zcat "$backup_file" 2>/dev/null | head -20 | grep -q "CREATE DATABASE\|INSERT INTO\|CREATE TABLE"; then - echo "✅ Valid timestamped backup found: $(basename "$backup_file")" - backup_path="$BACKUP_DIRS/$latest_timestamped" - break - fi - fi - done - else - echo "âš ī¸ No .sql.gz files found in timestamped backup directory" + + # Check for manual backups (*.sql files) + if [ -z "$backup_path" ]; then + echo "🔍 Checking for manual backup files..." 
+ latest_manual=$(ls -1t "$BACKUP_DIRS"/*.sql 2>/dev/null | head -n 1) + if [ -n "$latest_manual" ] && [ -f "$latest_manual" ]; then + echo "đŸ“Ļ Found manual backup: $(basename "$latest_manual")" + if timeout 10 head -20 "$latest_manual" 2>/dev/null | grep -q "CREATE DATABASE\|INSERT INTO\|CREATE TABLE"; then + echo "✅ Valid manual backup file: $(basename "$latest_manual")" + backup_path="$latest_manual" + break fi fi - else - echo "📅 No timestamped backup directories found" fi fi - else - echo "📁 Backup directory is empty" - fi -else - echo "📁 No backup directory found or legacy backup already selected" + + # If we found a backup in this directory, stop searching + if [ -n "$backup_path" ]; then + break + fi + done fi echo "🔄 Final backup path result: '$backup_path'" if [ -n "$backup_path" ]; then echo "đŸ“Ļ Found backup: $(basename "$backup_path")" - if [ -d "$backup_path" ]; then - echo "🔄 Restoring from backup directory: $backup_path" - restore_success=true - for backup_file in "$backup_path"/*.sql.gz; do - if [ -f "$backup_file" ]; then - if timeout 300 zcat "$backup_file" | mysql -h ${CONTAINER_MYSQL} -u${MYSQL_USER} -p${MYSQL_ROOT_PASSWORD}; then - echo "✅ Restored $(basename "$backup_file")" - else - echo "❌ Failed to restore $(basename "$backup_file")"; restore_success=false - fi + + restore_backup() { + local backup_path="$1" + local restore_success=true + + if [ -d "$backup_path" ]; then + echo "🔄 Restoring from backup directory: $backup_path" + + # Check for manifest file to understand backup structure + if [ -f "$backup_path/manifest.json" ]; then + echo "📋 Found manifest file, checking backup contents..." 
+ cat "$backup_path/manifest.json" fi - done - if [ "$restore_success" = true ]; then - echo "$(date): Backup successfully restored from $backup_path" > "$RESTORE_SUCCESS_MARKER" - exit 0 - else - echo "$(date): Backup restoration failed - proceeding with fresh setup" > "$RESTORE_FAILED_MARKER" - fi - elif [ -f "$backup_path" ]; then - echo "🔄 Restoring from backup file: $backup_path" - if timeout 300 mysql -h ${CONTAINER_MYSQL} -u${MYSQL_USER} -p${MYSQL_ROOT_PASSWORD} < "$backup_path"; then - echo "$(date): Backup successfully restored from $backup_path" > "$RESTORE_SUCCESS_MARKER" - exit 0 - else - echo "$(date): Backup restoration failed - proceeding with fresh setup" > "$RESTORE_FAILED_MARKER" + + # Restore compressed SQL files + if ls "$backup_path"/*.sql.gz >/dev/null 2>&1; then + for backup_file in "$backup_path"/*.sql.gz; do + if [ -f "$backup_file" ] && [ -s "$backup_file" ]; then + echo "🔄 Restoring $(basename "$backup_file")..." + if timeout 300 zcat "$backup_file" | mysql -h ${CONTAINER_MYSQL} -u${MYSQL_USER} -p${MYSQL_ROOT_PASSWORD}; then + echo "✅ Restored $(basename "$backup_file")" + else + echo "❌ Failed to restore $(basename "$backup_file")" + restore_success=false + fi + fi + done + fi + + # Also check for uncompressed SQL files + if ls "$backup_path"/*.sql >/dev/null 2>&1; then + for backup_file in "$backup_path"/*.sql; do + if [ -f "$backup_file" ] && [ -s "$backup_file" ]; then + echo "🔄 Restoring $(basename "$backup_file")..." 
+            if timeout 300 mysql -h ${CONTAINER_MYSQL} -u${MYSQL_USER} -p${MYSQL_ROOT_PASSWORD} < "$backup_file"; then
+              echo "✅ Restored $(basename "$backup_file")"
+            else
+              echo "❌ Failed to restore $(basename "$backup_file")"
+              restore_success=false
+            fi
+          fi
+        done
+      fi
+
+    elif [ -f "$backup_path" ]; then
+      echo "🔄 Restoring from backup file: $backup_path"
+      case "$backup_path" in
+        *.gz)
+          if timeout 300 zcat "$backup_path" | mysql -h ${CONTAINER_MYSQL} -u${MYSQL_USER} -p${MYSQL_ROOT_PASSWORD}; then
+            echo "✅ Restored compressed backup"
+          else
+            echo "❌ Failed to restore compressed backup"
+            restore_success=false
+          fi
+          ;;
+        *.sql)
+          if timeout 300 mysql -h ${CONTAINER_MYSQL} -u${MYSQL_USER} -p${MYSQL_ROOT_PASSWORD} < "$backup_path"; then
+            echo "✅ Restored SQL backup"
+          else
+            echo "❌ Failed to restore SQL backup"
+            restore_success=false
+          fi
+          ;;
+        *)
+          echo "âš ī¸ Unknown backup file format: $backup_path"
+          restore_success=false
+          ;;
+      esac
+    fi
+
+    [ "$restore_success" = true ]
+  }
+
+  if restore_backup "$backup_path"; then
+    echo "$(date): Backup successfully restored from $backup_path" > "$RESTORE_SUCCESS_MARKER"
+    echo "🎉 Backup restoration completed successfully!"
+ exit 0 + else + echo "$(date): Backup restoration failed - proceeding with fresh setup" > "$RESTORE_FAILED_MARKER" + echo "âš ī¸ Backup restoration failed, will proceed with fresh database setup" fi else echo "â„šī¸ No valid backups found - proceeding with fresh setup" @@ -197,6 +296,7 @@ mysql -h ${CONTAINER_MYSQL} -u${MYSQL_USER} -p${MYSQL_ROOT_PASSWORD} -e " CREATE DATABASE IF NOT EXISTS ${DB_AUTH_NAME} DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci; CREATE DATABASE IF NOT EXISTS ${DB_WORLD_NAME} DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci; CREATE DATABASE IF NOT EXISTS ${DB_CHARACTERS_NAME} DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci; +CREATE DATABASE IF NOT EXISTS acore_playerbots DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci; SHOW DATABASES;" || { echo "❌ Failed to create databases"; exit 1; } echo "✅ Fresh databases created - proceeding with schema import"