Clean up backup storage locations and make the scripts more beginner-friendly

This commit is contained in:
uprightbass360
2025-11-03 20:54:20 -05:00
parent 37c96731f7
commit 4cdba16d39
13 changed files with 35 additions and 65 deletions

272
scripts/backup-export.sh Executable file
View File

@@ -0,0 +1,272 @@
#!/bin/bash
# Export one or more ACore databases to ExportBackup_<timestamp>/
set -euo pipefail
# Remember where the user invoked us so relative --output paths resolve
# against their shell's cwd; the rest of the script runs from its own dir.
INVOCATION_DIR="$PWD"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"
# Canonical database identifiers accepted on the command line; SUPPORTED_SET
# is a membership lookup built from the same list.
SUPPORTED_DBS=(auth characters world)
declare -A SUPPORTED_SET=()
for db in "${SUPPORTED_DBS[@]}"; do
SUPPORTED_SET["$db"]=1
done
# identifier -> actual MySQL schema name (filled from --auth-db etc.).
declare -A DB_NAMES=([auth]="" [characters]="" [world]="")
declare -a INCLUDE_DBS=()
declare -a SKIP_DBS=()
MYSQL_PW=""
DEST_PARENT=""
DEST_PROVIDED=false
EXPLICIT_SELECTION=false
# Print CLI help. The 'EOF' delimiter is quoted so nothing expands.
# FIX: the "export all" example previously used --all, but this script's
# option parser has no --all case (it would die with "Unknown option: --all";
# only backup-import.sh supports --all). The example now selects every
# database explicitly via --db.
usage(){
cat <<'EOF'
Usage: ./backup-export.sh [options]
Creates a timestamped backup of one or more ACore databases.
Options:
-o, --output DIR Destination directory (default: storage/backups)
-p, --password PASS MySQL root password
--auth-db NAME Auth database schema name
--characters-db NAME Characters database schema name
--world-db NAME World database schema name
--db LIST Comma-separated list of databases to export
--skip LIST Comma-separated list of databases to skip
-h, --help Show this help and exit
Supported database identifiers: auth, characters, world.
By default exports auth and characters if database names are provided.
Examples:
# Export all databases to default location
./backup-export.sh --password azerothcore123 --auth-db acore_auth --characters-db acore_characters --world-db acore_world --db auth,characters,world
# Export specific databases to custom directory
./backup-export.sh --output /path/to/backups --password azerothcore123 --db auth,characters --auth-db acore_auth --characters-db acore_characters
# Export only world database
./backup-export.sh --password azerothcore123 --db world --world-db acore_world
EOF
}
# Write an error message to stderr.
err() {
  printf 'Error: %s\n' "$*" >&2
}

# Report the given error and abort the script with status 1.
die() {
  err "$1"
  exit 1
}
# Canonicalize a database identifier: lower-case it and strip all whitespace.
normalize_token() {
  local cleaned
  cleaned="$(tr '[:upper:]' '[:lower:]' <<<"$1" | tr -d '[:space:]')"
  printf '%s' "$cleaned"
}
# Append $2 to the array named by $1 unless it is already present.
add_unique() {
  local -n _dest="$1"
  local candidate="$2"
  local member
  for member in "${_dest[@]:-}"; do
    if [[ "$member" == "$candidate" ]]; then
      return
    fi
  done
  _dest+=("$candidate")
}
# Parse a comma-separated database list into the array named by $1.
# Tokens are normalized (lower-cased, whitespace stripped), empty tokens are
# skipped, unknown identifiers abort via die, and duplicates are dropped.
parse_db_list(){
local -n target="$1"
local value="$2"
IFS=',' read -ra parts <<<"$value"
for part in "${parts[@]}"; do
local token
token="$(normalize_token "$part")"
[[ -z "$token" ]] && continue
if [[ -z "${SUPPORTED_SET[$token]:-}" ]]; then
die "Unknown database identifier: $token (supported: ${SUPPORTED_DBS[*]})"
fi
add_unique target "$token"
done
}
# Remove every element equal to $2 from the array named by $1.
remove_from_list() {
  local -n _list="$1"
  local unwanted="$2"
  local -a kept=()
  local entry
  for entry in "${_list[@]}"; do
    if [[ "$entry" != "$unwanted" ]]; then
      kept+=("$entry")
    fi
  done
  _list=("${kept[@]}")
}
# Resolve path $2 against base directory $1, printing a normalized absolute
# path. An empty path yields the absolute base; an absolute path is only
# normalized. Delegates to python3 for portable path handling.
resolve_relative(){
local base="$1" path="$2"
if command -v python3 >/dev/null 2>&1; then
python3 - "$base" "$path" <<'PY'
import os, sys
base, path = sys.argv[1:3]
if not path:
print(os.path.abspath(base))
elif os.path.isabs(path):
print(os.path.normpath(path))
else:
print(os.path.normpath(os.path.join(base, path)))
PY
else
die "python3 is required but was not found on PATH"
fi
}
# Print $1 as a JSON string literal (quoting/escaping via python's json module).
json_string(){
if ! command -v python3 >/dev/null 2>&1; then
die "python3 is required but was not found on PATH"
fi
python3 - "$1" <<'PY'
import json, sys
print(json.dumps(sys.argv[1]))
PY
}
# --- Command-line parsing -------------------------------------------------
# Each value-taking option validates that a second argument exists before
# consuming it; positional arguments are collected and rejected afterwards.
POSITIONAL=()
while [[ $# -gt 0 ]]; do
case "$1" in
-o|--output)
[[ $# -ge 2 ]] || die "--output requires a directory argument"
DEST_PARENT="$2"
DEST_PROVIDED=true
shift 2
;;
-p|--password)
[[ $# -ge 2 ]] || die "--password requires a value"
MYSQL_PW="$2"
shift 2
;;
--auth-db)
[[ $# -ge 2 ]] || die "--auth-db requires a value"
DB_NAMES[auth]="$2"
shift 2
;;
--characters-db)
[[ $# -ge 2 ]] || die "--characters-db requires a value"
DB_NAMES[characters]="$2"
shift 2
;;
--world-db)
[[ $# -ge 2 ]] || die "--world-db requires a value"
DB_NAMES[world]="$2"
shift 2
;;
--db|--only)
[[ $# -ge 2 ]] || die "--db requires a value"
EXPLICIT_SELECTION=true
parse_db_list INCLUDE_DBS "$2"
shift 2
;;
--skip)
[[ $# -ge 2 ]] || die "--skip requires a value"
parse_db_list SKIP_DBS "$2"
shift 2
;;
-h|--help)
usage
exit 0
;;
--)
shift
# Everything after "--" is treated as positional (and rejected below).
while [[ $# -gt 0 ]]; do
POSITIONAL+=("$1")
shift
done
break
;;
-*)
die "Unknown option: $1"
;;
*)
POSITIONAL+=("$1")
shift
;;
esac
done
if ((${#POSITIONAL[@]} > 0)); then
die "Positional arguments are not supported. Use named options instead."
fi
# --- Database selection ---------------------------------------------------
declare -a ACTIVE_DBS=()
if $EXPLICIT_SELECTION; then
ACTIVE_DBS=("${INCLUDE_DBS[@]}")
else
# No --db given: export every database whose schema name was provided,
# falling back to auth+characters when none were named.
for db in "${SUPPORTED_DBS[@]}"; do
if [[ -n "${DB_NAMES[$db]}" ]]; then
add_unique ACTIVE_DBS "$db"
fi
done
if ((${#ACTIVE_DBS[@]} == 0)); then
ACTIVE_DBS=(auth characters)
fi
fi
for skip in "${SKIP_DBS[@]:-}"; do
remove_from_list ACTIVE_DBS "$skip"
done
if ((${#ACTIVE_DBS[@]} == 0)); then
die "No databases selected for export."
fi
[[ -n "$MYSQL_PW" ]] || die "MySQL password is required (use --password)."
# Every selected database must resolve to a concrete schema name.
for db in "${ACTIVE_DBS[@]}"; do
case "$db" in
auth|characters|world) ;;
*) die "Unsupported database identifier requested: $db" ;;
esac
if [[ -z "${DB_NAMES[$db]}" ]]; then
die "Missing schema name for '$db'. Provide --${db}-db."
fi
done
# --- Destination directory ------------------------------------------------
if $DEST_PROVIDED; then
DEST_PARENT="$(resolve_relative "$INVOCATION_DIR" "$DEST_PARENT")"
else
# Use storage/backups as default to align with existing backup structure
if [ -d "$SCRIPT_DIR/storage" ]; then
DEST_PARENT="$SCRIPT_DIR/storage/backups"
mkdir -p "$DEST_PARENT"
else
DEST_PARENT="$SCRIPT_DIR"
fi
fi
TIMESTAMP="$(date +%Y%m%d_%H%M%S)"
DEST_DIR="$(printf '%s/ExportBackup_%s' "$DEST_PARENT" "$TIMESTAMP")"
mkdir -p "$DEST_DIR"
# NOTE(review): `date --iso-8601` is GNU-only; BSD/macOS date lacks this flag.
generated_at="$(date --iso-8601=seconds)"
# Dump schema $1 from the ac-mysql container into gzip file $2.
# NOTE(review): the root password is passed on mysqldump's argv inside the
# container and is visible to `ps` there — consider MYSQL_PWD or a cnf file.
dump_db(){
local schema="$1" outfile="$2"
echo "Dumping ${schema} -> ${outfile}"
docker exec ac-mysql mysqldump -uroot -p"$MYSQL_PW" "$schema" | gzip > "$outfile"
}
# Dump every selected database, then write a manifest.json recording when the
# export ran and which identifier maps to which schema. backup-import.sh
# reads this manifest to restore without explicit --*-db flags.
for db in "${ACTIVE_DBS[@]}"; do
outfile="$DEST_DIR/acore_${db}.sql.gz"
dump_db "${DB_NAMES[$db]}" "$outfile"
done
# `first` controls JSON comma placement between database entries.
first=1
{
printf '{\n'
printf ' "generated_at": %s,\n' "$(json_string "$generated_at")"
printf ' "databases": {\n'
for db in "${ACTIVE_DBS[@]}"; do
key_json="$(json_string "$db")"
value_json="$(json_string "${DB_NAMES[$db]}")"
if (( first )); then
first=0
else
printf ',\n'
fi
printf ' %s: %s' "$key_json" "$value_json"
done
printf '\n }\n'
printf '}\n'
} > "$DEST_DIR/manifest.json"
echo "Exported databases: ${ACTIVE_DBS[*]}"
echo "Backups saved under $DEST_DIR"

473
scripts/backup-import.sh Executable file
View File

@@ -0,0 +1,473 @@
#!/bin/bash
# Restore one or more ACore databases from a backup directory.
set -euo pipefail
# Remember the caller's cwd so a relative --backup-dir resolves against it.
INVOCATION_DIR="$PWD"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"
# ANSI color escapes used by the logging helpers below.
COLOR_RED='\033[0;31m'
COLOR_GREEN='\033[0;32m'
COLOR_YELLOW='\033[1;33m'
COLOR_RESET='\033[0m'
# Colored logging helpers: log (green), warn (yellow), err (red) write to
# stdout; fatal prints in red and exits with status 1.
_colorize() {
  local tint="$1"
  shift
  printf '%b\n' "${tint}$*${COLOR_RESET}"
}
log() { _colorize "$COLOR_GREEN" "$@"; }
warn() { _colorize "$COLOR_YELLOW" "$@"; }
err() { _colorize "$COLOR_RED" "$@"; }
fatal() {
  err "$*"
  exit 1
}
# Canonical database identifiers accepted on the command line; SUPPORTED_SET
# is a membership lookup built from the same list.
SUPPORTED_DBS=(auth characters world)
declare -A SUPPORTED_SET=()
for db in "${SUPPORTED_DBS[@]}"; do
SUPPORTED_SET["$db"]=1
done
# identifier -> MySQL schema name; filled from flags, then manifest.json,
# then the dump file name (in that order of precedence).
declare -A DB_NAMES=([auth]="" [characters]="" [world]="")
declare -a INCLUDE_DBS=()
declare -a SKIP_DBS=()
declare -a ACTIVE_DBS=()
MYSQL_PW=""
BACKUP_DIR=""
BACKUP_PROVIDED=false
EXPLICIT_SELECTION=false
# Print CLI help text (quoted heredoc delimiter, so nothing expands).
usage(){
cat <<'EOF'
Usage: ./backup-import.sh [options]
Restores selected ACore databases from a backup directory.
Options:
-b, --backup-dir DIR Backup directory (required)
-p, --password PASS MySQL root password
--auth-db NAME Auth database schema name
--characters-db NAME Characters database schema name
--world-db NAME World database schema name
--db LIST Comma-separated list of databases to import
--skip LIST Comma-separated list of databases to skip
--all Import all supported databases
-h, --help Show this help and exit
Supported database identifiers: auth, characters, world.
By default the script restores auth and characters databases.
Examples:
# Restore from specific backup directory
./backup-import.sh --backup-dir /path/to/backup --password azerothcore123 --auth-db acore_auth --characters-db acore_characters
# Restore all databases
./backup-import.sh --backup-dir ./storage/backups/ExportBackup_20241029_120000 --password azerothcore123 --all --auth-db acore_auth --characters-db acore_characters --world-db acore_world
# Restore only world database
./backup-import.sh --backup-dir ./backups/daily/latest --password azerothcore123 --db world --world-db acore_world
EOF
}
# Canonicalize a database identifier: lower-case and remove all whitespace.
normalize_token() {
  tr '[:upper:]' '[:lower:]' <<<"$1" | tr -d '[:space:]'
}
# Append $2 to the array named by $1 only if it is not already contained.
add_unique() {
  local -n _arr="$1"
  local item="$2"
  local seen
  for seen in "${_arr[@]:-}"; do
    [[ "$seen" != "$item" ]] || return 0
  done
  _arr+=("$item")
}
# Parse a comma-separated database list into the array named by $1.
# Tokens are normalized, empty tokens skipped, unknown identifiers abort via
# fatal, and duplicates are dropped.
parse_db_list(){
local -n target="$1"
local value="$2"
IFS=',' read -ra parts <<<"$value"
for part in "${parts[@]}"; do
local token
token="$(normalize_token "$part")"
[[ -z "$token" ]] && continue
if [[ -z "${SUPPORTED_SET[$token]:-}" ]]; then
fatal "Unknown database identifier: $token (supported: ${SUPPORTED_DBS[*]})"
fi
add_unique target "$token"
done
}
# Drop every occurrence of $2 from the array named by $1.
remove_from_list() {
  local -n _items="$1"
  local target="$2"
  local -a survivors=()
  local element
  for element in "${_items[@]}"; do
    if [[ "$element" == "$target" ]]; then
      continue
    fi
    survivors+=("$element")
  done
  _items=("${survivors[@]}")
}
# Resolve path $2 against base directory $1, printing a normalized absolute
# path. An empty path yields the absolute base; an absolute path is only
# normalized. Delegates to python3 for portable path handling.
resolve_relative(){
local base="$1" path="$2"
if command -v python3 >/dev/null 2>&1; then
python3 - "$base" "$path" <<'PY'
import os, sys
base, path = sys.argv[1:3]
if not path:
print(os.path.abspath(base))
elif os.path.isabs(path):
print(os.path.normpath(path))
else:
print(os.path.normpath(os.path.join(base, path)))
PY
else
fatal "python3 is required but was not found on PATH"
fi
}
# Populate DB_NAMES from the manifest file at $1, if it exists.
# Only fills identifiers still empty (CLI flags take precedence). The embedded
# python maps manifest keys/values to supported identifiers and handles both
# {"databases": {...}} objects and ["schema", ...] lists, emitting key=value
# lines consumed by the shell loop.
load_manifest(){
local path="$1"
[[ -f "$path" ]] || return 0
if ! command -v python3 >/dev/null 2>&1; then
fatal "python3 is required to read $path"
fi
while IFS='=' read -r key value; do
[[ -n "$key" && -n "$value" ]] || continue
local token
token="$(normalize_token "$key")"
[[ -n "${SUPPORTED_SET[$token]:-}" ]] || continue
if [[ -z "${DB_NAMES[$token]}" ]]; then
DB_NAMES[$token]="$value"
fi
done < <(python3 - "$path" <<'PY'
import json, sys
SUPPORTED = {
"auth": {"keys": {"auth"}, "schemas": {"acore_auth"}},
"characters": {"keys": {"characters", "chars", "char"}, "schemas": {"acore_characters"}},
"world": {"keys": {"world"}, "schemas": {"acore_world"}},
}
def map_entry(key, value, result):
if key and key in SUPPORTED:
result[key] = value
return
value_lower = value.lower()
for ident, meta in SUPPORTED.items():
if value_lower in meta["schemas"]:
result.setdefault(ident, value)
return
if key:
for ident, meta in SUPPORTED.items():
if key in meta["keys"]:
result.setdefault(ident, value)
return
def main():
path = sys.argv[1]
with open(path, "r", encoding="utf-8") as fh:
data = json.load(fh)
result = {}
databases = data.get("databases")
if isinstance(databases, dict):
for key, value in databases.items():
map_entry(key.lower(), str(value), result)
elif isinstance(databases, list):
for value in databases:
map_entry("", str(value), result)
for key, value in result.items():
print(f"{key}={value}")
if __name__ == "__main__":
main()
PY
)
}
# Locate the newest dump file for database identifier $1 under $BACKUP_DIR.
# The embedded python searches (in order) the latest daily/ subdir, the
# latest hourly/ subdir, the latest timestamped ExportBackup_* directory, and
# finally $BACKUP_DIR itself, matching acore_<db>/<db>/<schema-hint> names
# with .sql or .sql.gz suffixes. Prints the most recent match; exits 1 when
# nothing is found.
find_dump(){
local db="$1"
local hint="${DB_NAMES[$db]}"
if ! command -v python3 >/dev/null 2>&1; then
fatal "python3 is required to locate backup dumps"
fi
python3 - "$BACKUP_DIR" "$db" "$hint" <<'PY'
import glob, os, sys
backup_dir, db, hint = sys.argv[1:4]
# Search patterns for database dumps
patterns = [
f"acore_{db}.sql.gz",
f"acore_{db}.sql",
f"{db}.sql.gz",
f"{db}.sql",
]
if hint:
patterns = [f"{hint}.sql.gz", f"{hint}.sql"] + patterns
# Search locations (in order of preference)
search_dirs = []
# Check for daily backups first (most recent)
daily_dir = os.path.join(backup_dir, "daily")
if os.path.isdir(daily_dir):
daily_subdirs = [d for d in os.listdir(daily_dir) if os.path.isdir(os.path.join(daily_dir, d))]
if daily_subdirs:
latest_daily = max(daily_subdirs, key=lambda x: os.path.getmtime(os.path.join(daily_dir, x)))
search_dirs.append(os.path.join(daily_dir, latest_daily))
# Check for hourly backups
hourly_dir = os.path.join(backup_dir, "hourly")
if os.path.isdir(hourly_dir):
hourly_subdirs = [d for d in os.listdir(hourly_dir) if os.path.isdir(os.path.join(hourly_dir, d))]
if hourly_subdirs:
latest_hourly = max(hourly_subdirs, key=lambda x: os.path.getmtime(os.path.join(hourly_dir, x)))
search_dirs.append(os.path.join(hourly_dir, latest_hourly))
# Check for timestamped backup directories
timestamped_dirs = []
try:
for item in os.listdir(backup_dir):
item_path = os.path.join(backup_dir, item)
if os.path.isdir(item_path):
# Match ExportBackup_YYYYMMDD_HHMMSS or just YYYYMMDD_HHMMSS
if item.startswith("ExportBackup_") or (len(item) == 15 and item[8] == '_'):
timestamped_dirs.append(item_path)
except OSError:
pass
if timestamped_dirs:
latest_timestamped = max(timestamped_dirs, key=os.path.getmtime)
search_dirs.append(latest_timestamped)
# Add the main backup directory itself
search_dirs.append(backup_dir)
# Search for matching dumps
seen = {}
matches = []
for search_dir in search_dirs:
for pattern in patterns:
for path in glob.glob(os.path.join(search_dir, pattern)):
if path not in seen and os.path.isfile(path):
seen[path] = True
matches.append(path)
if not matches:
sys.exit(1)
# Return the most recent match
latest = max(matches, key=os.path.getmtime)
print(latest)
PY
}
# Infer the schema name from a dump file's basename.
# acore_{auth,characters,world}.sql[.gz] map to their schema directly; any
# other NAME.sql[.gz] yields NAME; unrecognized names print nothing.
guess_schema_from_dump() {
  local file_name
  file_name="$(basename "$1")"
  case "$file_name" in
    acore_auth.sql|acore_auth.sql.gz)
      printf '%s\n' "acore_auth"
      ;;
    acore_characters.sql|acore_characters.sql.gz)
      printf '%s\n' "acore_characters"
      ;;
    acore_world.sql|acore_world.sql.gz)
      printf '%s\n' "acore_world"
      ;;
    *)
      if [[ "$file_name" =~ ^([A-Za-z0-9_-]+)\.sql(\.gz)?$ ]]; then
        printf '%s\n' "${BASH_REMATCH[1]}"
      fi
      ;;
  esac
}
# Compact timestamp used in pre-import backup file names.
timestamp(){ date +%Y%m%d_%H%M%S; }
# Snapshot the current contents of schema $1 into manual-backups/ before the
# import overwrites it; $2 is the short identifier used in the file name.
backup_db(){
local schema="$1" label="$2"
local out="manual-backups/${label}-pre-import-$(timestamp).sql"
mkdir -p manual-backups
log "Backing up current ${schema} to ${out}"
docker exec ac-mysql mysqldump -uroot -p"$MYSQL_PW" "$schema" > "$out"
}
# Stream dump file $2 (plain .sql or gzip-compressed .sql.gz) into schema $1
# inside the ac-mysql container.
restore(){
local schema="$1" dump="$2"
log "Importing ${dump##*/} into ${schema}"
case "$dump" in
*.gz) gzip -dc "$dump" ;;
*.sql) cat "$dump" ;;
*) fatal "Unsupported dump format: $dump" ;;
esac | docker exec -i ac-mysql mysql -uroot -p"$MYSQL_PW" "$schema"
}
# Return success iff $1 is one of the identifiers in ACTIVE_DBS.
db_selected() {
  local wanted="$1"
  local candidate
  for candidate in "${ACTIVE_DBS[@]}"; do
    if [[ "$candidate" == "$wanted" ]]; then
      return 0
    fi
  done
  return 1
}
# Run SQL statement $1 in ac-mysql, printing the raw result (-N: no column
# headers, -B: tab-separated batch mode).
count_rows(){
docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -N -B -e "$1"
}
# Fast-path: show help when the first argument asks for it, before any other
# validation runs. (The main loop below also handles -h/--help.)
case "${1:-}" in
-h|--help) usage; exit 0;;
esac
# --- Command-line parsing -------------------------------------------------
# Each value-taking option validates that a second argument exists before
# consuming it; positional arguments are collected and rejected afterwards.
POSITIONAL=()
while [[ $# -gt 0 ]]; do
case "$1" in
-b|--backup-dir)
[[ $# -ge 2 ]] || fatal "--backup-dir requires a directory argument"
BACKUP_DIR="$2"
BACKUP_PROVIDED=true
shift 2
;;
-p|--password)
[[ $# -ge 2 ]] || fatal "--password requires a value"
MYSQL_PW="$2"
shift 2
;;
--auth-db)
[[ $# -ge 2 ]] || fatal "--auth-db requires a value"
DB_NAMES[auth]="$2"
shift 2
;;
--characters-db)
[[ $# -ge 2 ]] || fatal "--characters-db requires a value"
DB_NAMES[characters]="$2"
shift 2
;;
--world-db)
[[ $# -ge 2 ]] || fatal "--world-db requires a value"
DB_NAMES[world]="$2"
shift 2
;;
--db|--only)
[[ $# -ge 2 ]] || fatal "--db requires a value"
EXPLICIT_SELECTION=true
parse_db_list INCLUDE_DBS "$2"
shift 2
;;
--skip)
[[ $# -ge 2 ]] || fatal "--skip requires a value"
parse_db_list SKIP_DBS "$2"
shift 2
;;
--all)
EXPLICIT_SELECTION=true
for db in "${SUPPORTED_DBS[@]}"; do
add_unique INCLUDE_DBS "$db"
done
shift
;;
-h|--help)
usage
exit 0
;;
--)
shift
# Everything after "--" is treated as positional (and rejected below).
while [[ $# -gt 0 ]]; do
POSITIONAL+=("$1")
shift
done
break
;;
-*)
fatal "Unknown option: $1"
;;
*)
POSITIONAL+=("$1")
shift
;;
esac
done
if ((${#POSITIONAL[@]} > 0)); then
fatal "Positional arguments are not supported. Use named options instead."
fi
# --- Database selection and dump resolution -------------------------------
if $EXPLICIT_SELECTION; then
ACTIVE_DBS=("${INCLUDE_DBS[@]}")
else
# Default restore set: auth and characters (matches the usage text).
ACTIVE_DBS=(auth characters)
fi
for skip in "${SKIP_DBS[@]:-}"; do
remove_from_list ACTIVE_DBS "$skip"
done
if ((${#ACTIVE_DBS[@]} == 0)); then
fatal "No databases selected for import."
fi
if $BACKUP_PROVIDED; then
BACKUP_DIR="$(resolve_relative "$INVOCATION_DIR" "$BACKUP_DIR")"
else
fatal "Backup directory is required. Use --backup-dir DIR to specify."
fi
[[ -d "$BACKUP_DIR" ]] || fatal "Backup directory not found: $BACKUP_DIR"
log "Using backup directory: $BACKUP_DIR"
# Manifest (if present) supplies schema names not given on the CLI.
MANIFEST_PATH="$BACKUP_DIR/manifest.json"
if [[ -f "$MANIFEST_PATH" ]]; then
load_manifest "$MANIFEST_PATH"
fi
[[ -n "$MYSQL_PW" ]] || fatal "MySQL password is required (use --password)."
declare -A DUMP_PATHS=()
log "Databases selected: ${ACTIVE_DBS[*]}"
# Resolve each database to a dump file; as a last resort derive the schema
# name from the dump file's own name.
for db in "${ACTIVE_DBS[@]}"; do
if ! dump_path="$(find_dump "$db")"; then
fatal "No dump found for '$db' in $BACKUP_DIR (expected files like acore_${db}.sql or .sql.gz)."
fi
if [[ -z "${DB_NAMES[$db]}" ]]; then
DB_NAMES[$db]="$(guess_schema_from_dump "$dump_path")"
fi
[[ -n "${DB_NAMES[$db]}" ]] || fatal "Missing schema name for '$db'. Provide --${db}-db, include it in manifest.json, or name the dump appropriately."
DUMP_PATHS["$db"]="$dump_path"
log " $db -> ${DB_NAMES[$db]} (using ${dump_path##*/})"
done
# --- Restore sequence -----------------------------------------------------
# Stop game services, snapshot + import each schema, restart the services,
# wait for the server processes, then print sanity counts.
log "Stopping world/auth services"
docker stop ac-worldserver ac-authserver >/dev/null || warn "Services already stopped"
for db in "${ACTIVE_DBS[@]}"; do
backup_db "${DB_NAMES[$db]}" "$db"
restore "${DB_NAMES[$db]}" "${DUMP_PATHS[$db]}"
done
log "Module SQL patches will be applied when services restart"
log "Restarting services to reinitialize GUID generators"
docker restart ac-authserver ac-worldserver >/dev/null
log "Waiting for services to fully initialize..."
sleep 10
# Poll up to 30 x 2s for both server processes to appear.
for i in {1..30}; do
if docker exec ac-worldserver pgrep worldserver >/dev/null 2>&1 && docker exec ac-authserver pgrep authserver >/dev/null 2>&1; then
log "Services are running"
break
fi
if [ $i -eq 30 ]; then
warn "Services took longer than expected to start"
fi
sleep 2
done
# Post-restore sanity checks for whichever databases were imported.
if db_selected auth; then
ACCOUNTS=$(count_rows "SELECT COUNT(*) FROM ${DB_NAMES[auth]}.account;")
log "Accounts: $ACCOUNTS"
fi
if db_selected characters; then
CHARS=$(count_rows "SELECT COUNT(*) FROM ${DB_NAMES[characters]}.characters;")
log "Characters: $CHARS"
if [ "$CHARS" -gt 0 ]; then
MAX_GUID=$(count_rows "SELECT COALESCE(MAX(guid), 0) FROM ${DB_NAMES[characters]}.characters;")
log "Highest character GUID: $MAX_GUID"
log "Next new character will receive GUID: $((MAX_GUID + 1))"
fi
fi
./status.sh --once || warn "status.sh reported issues; inspect manually."
log "Import completed for: ${ACTIVE_DBS[*]}"

1041
scripts/backup-merge.sh Executable file

File diff suppressed because it is too large Load Diff

39
scripts/start-containers.sh Executable file
View File

@@ -0,0 +1,39 @@
#!/bin/bash
# Thin wrapper to bring the AzerothCore stack online without triggering rebuilds.
# Picks the right profile automatically (standard/playerbots/modules) and delegates
# to deploy.sh so all staging/health logic stays consistent.
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Ask scripts/modules.py for module state (JSON) and choose a compose profile:
# "playerbots" when MODULE_PLAYERBOTS is enabled, "modules" when any enabled
# module needs a build, "standard" otherwise.
PROFILE="$(python3 - <<'PY' "$ROOT_DIR"
import json, subprocess, sys
from pathlib import Path
root = Path(sys.argv[1])
modules_py = root / "scripts" / "modules.py"
env_path = root / ".env"
manifest_path = root / "config" / "modules.json"
state = json.loads(subprocess.check_output([
sys.executable,
str(modules_py),
"--env-path", str(env_path),
"--manifest", str(manifest_path),
"dump", "--format", "json",
]))
enabled = [m for m in state["modules"] if m["enabled"]]
profile = "standard"
if any(m["key"] == "MODULE_PLAYERBOTS" and m["enabled"] for m in enabled):
profile = "playerbots"
elif any(m["needs_build"] and m["enabled"] for m in enabled):
profile = "modules"
print(profile)
PY
)"
exec "${ROOT_DIR}/deploy.sh" --profile "$PROFILE" --yes --no-watch

9
scripts/stop-containers.sh Executable file
View File

@@ -0,0 +1,9 @@
#!/bin/bash
# Thin wrapper to stop all AzerothCore project containers while preserving data.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# NOTE(review): relies on cleanup.sh treating --soft as the non-destructive
# (data-preserving) mode and --force as skipping prompts — confirm there.
exec "${SCRIPT_DIR}/cleanup.sh" --soft --force

188
scripts/verify-deployment.sh Executable file
View File

@@ -0,0 +1,188 @@
#!/bin/bash
# Project: ac-compose
# NOTE(review): only `set -e` here (no -u / pipefail), unlike the backup
# scripts in this repo.
set -e
# Simple profile-aware deploy + health check for profiles-verify/docker-compose.yml
BLUE='\033[0;34m'; GREEN='\033[0;32m'; YELLOW='\033[1;33m'; RED='\033[0;31m'; NC='\033[0m'
info(){ echo -e "${BLUE} $*${NC}"; }
ok(){ echo -e "${GREEN}$*${NC}"; }
warn(){ echo -e "${YELLOW}⚠️ $*${NC}"; }
err(){ echo -e "${RED}$*${NC}"; }
# Compose file lives next to this script; defaults below are overridable via
# --profiles / --env-file / --skip-deploy / --quick.
COMPOSE_FILE="$(dirname "$0")/docker-compose.yml"
ENV_FILE=""
PROFILES=(db services-standard client-data modules tools)
SKIP_DEPLOY=false
QUICK=false
# Print CLI help (unquoted heredoc delimiter, so $0 expands to the script name).
usage(){
cat <<EOF
Usage: $0 [--profiles p1,p2,...] [--env-file path] [--skip-deploy] [--quick]
Default profiles: db,services-standard,client-data,modules,tools
Examples:
$0 --profiles db,services-standard,client-data --env-file ./services.env
$0 --profiles db,services-playerbots,client-data-bots,modules,tools
EOF
}
# --- Command-line parsing -------------------------------------------------
# --profiles replaces the default PROFILES array wholesale (comma-separated).
while [[ $# -gt 0 ]]; do
case "$1" in
--profiles) IFS=',' read -r -a PROFILES <<< "$2"; shift 2;;
--env-file) ENV_FILE="$2"; shift 2;;
--skip-deploy) SKIP_DEPLOY=true; shift;;
--quick) QUICK=true; shift;;
-h|--help) usage; exit 0;;
*) err "Unknown arg: $1"; usage; exit 1;;
esac
done
# Derive a docker-compose project name. Reads COMPOSE_PROJECT_NAME from the
# env file ($ENV_FILE, or .env beside the compose file), falls back to
# "acore-compose", then sanitizes: lower-case, spaces -> '-', keep only
# [a-z0-9_-], and prefix "ac" when the result does not start with [a-z0-9].
resolve_project_name() {
  local source_env
  if [ -n "$ENV_FILE" ]; then
    source_env="$ENV_FILE"
  else
    source_env="$(dirname "$COMPOSE_FILE")/.env"
  fi
  local name=""
  if [ -f "$source_env" ]; then
    name="$(grep -E '^COMPOSE_PROJECT_NAME=' "$source_env" | tail -n1 | cut -d'=' -f2-)"
  fi
  [ -n "$name" ] || name="acore-compose"
  local clean
  clean="$(echo "$name" | tr '[:upper:]' '[:lower:]')"
  clean="${clean// /-}"
  clean="$(echo "$clean" | tr -cd 'a-z0-9_-')"
  if [[ -z "$clean" ]]; then
    clean="acore-compose"
  elif [[ ! "$clean" =~ ^[a-z0-9] ]]; then
    clean="ac${clean}"
  fi
  echo "$clean"
}
# Invoke `docker compose` with the resolved project name, the optional
# --env-file, and the pinned compose file, forwarding all arguments.
run_compose(){
local compose_args=()
local project_name
project_name="$(resolve_project_name)"
compose_args+=(--project-name "$project_name")
if [ -n "$ENV_FILE" ]; then
compose_args+=(--env-file "$ENV_FILE")
fi
compose_args+=(-f "$COMPOSE_FILE")
docker compose "${compose_args[@]}" "$@"
}
# Print the effective env file path: $ENV_FILE when given, otherwise the
# .env file sitting next to the compose file.
env_file_path() {
  if [ -n "$ENV_FILE" ]; then
    printf '%s\n' "$ENV_FILE"
  else
    printf '%s\n' "$(dirname "$COMPOSE_FILE")/.env"
  fi
}
# Look up key $1 in the env file, printing $2 (default "") when the key is
# absent or its value empty. Uses the last matching assignment and strips
# carriage returns from the value.
read_env_value() {
  local key="$1"
  local fallback="${2:-}"
  local file result=""
  file="$(env_file_path)"
  if [ -f "$file" ]; then
    result="$(grep -E "^${key}=" "$file" | tail -n1 | cut -d'=' -f2- | tr -d '\r')"
  fi
  printf '%s\n' "${result:-$fallback}"
}
# If the module-staging sentinel file exists, either invoke
# scripts/rebuild-with-modules.sh (when AUTO_REBUILD_ON_DEPLOY=1) or tell the
# operator to run it manually. Never fails the deployment: a failed rebuild
# only warns.
handle_auto_rebuild(){
local storage_path
storage_path="$(read_env_value STORAGE_PATH_LOCAL "./local-storage")"
if [[ "$storage_path" != /* ]]; then
# Remove leading ./ if present
storage_path="${storage_path#./}"
storage_path="$(dirname "$COMPOSE_FILE")/$storage_path"
fi
local sentinel="$storage_path/modules/.requires_rebuild"
[ -f "$sentinel" ] || return 0
info "Module rebuild required (detected $(realpath "$sentinel" 2>/dev/null || echo "$sentinel"))."
local auto_rebuild
auto_rebuild="$(read_env_value AUTO_REBUILD_ON_DEPLOY "0")"
if [ "$auto_rebuild" != "1" ]; then
warn "Run ./scripts/rebuild-with-modules.sh after preparing your source tree."
return 0
fi
local rebuild_source
rebuild_source="$(read_env_value MODULES_REBUILD_SOURCE_PATH "")"
info "AUTO_REBUILD_ON_DEPLOY=1; invoking ./scripts/rebuild-with-modules.sh."
local cmd=(./scripts/rebuild-with-modules.sh --yes)
if [ -n "$rebuild_source" ]; then
cmd+=(--source "$rebuild_source")
fi
if "${cmd[@]}"; then
info "Module rebuild completed."
else
warn "Automatic rebuild failed; run ./scripts/rebuild-with-modules.sh manually."
fi
}
# Report container $1 as healthy or running; returns 1 when it is not
# running. Falls back to a plain "is it listed by docker ps" check for
# containers that define no HEALTHCHECK.
check_health(){
local name="$1"
local status=$(docker inspect --format='{{.State.Health.Status}}' "$name" 2>/dev/null || echo "no-health-check")
if [ "$status" = "healthy" ]; then ok "$name: healthy"; return 0; fi
if docker ps --format '{{.Names}}' | grep -q "^${name}$"; then ok "$name: running"; return 0; fi
err "$name: not running"; return 1
}
# Poll `docker logs $1` until $2 appears, up to $3 attempts (default 360) at
# 5-second intervals. Returns 1 (with a warning) on timeout.
wait_log(){
local name="$1"; local needle="$2"; local attempts="${3:-360}"; local interval=5
info "Waiting for $name log: '$needle' ... (timeout: $((attempts*interval))s)"
for i in $(seq 1 "$attempts"); do
if docker logs "$name" 2>/dev/null | grep -q "$needle"; then ok "$name ready"; return 0; fi
sleep "$interval"
done
warn "$name did not report '$needle'"
return 1
}
# Bring up the selected compose profiles in detached mode.
deploy(){
info "Deploying profiles: ${PROFILES[*]}"
local args=()
for p in "${PROFILES[@]}"; do args+=(--profile "$p"); done
run_compose "${args[@]}" up -d
}
# Verify the core containers and (unless --quick) the key host ports.
# Returns non-zero when any core service check fails.
#
# FIX: the original used `check_health X || ((failures++))`. When failures is
# 0, the post-increment expression evaluates to 0, so (( )) returns status 1
# and — because health_checks is called as a plain statement from main() under
# `set -e` — the script aborted on the FIRST failing check instead of
# counting failures and reporting them. The assignment form always succeeds.
health_checks(){
  info "Checking container health"
  local failures=0
  check_health ac-mysql || failures=$((failures + 1))
  check_health ac-authserver || failures=$((failures + 1))
  check_health ac-worldserver || failures=$((failures + 1))
  if [ "$QUICK" = false ]; then
    info "Port checks"
    for port in 64306 3784 8215 7778 8081 4201; do
      if timeout 3 bash -c "</dev/tcp/127.0.0.1/$port" 2>/dev/null; then ok "port $port: open"; else warn "port $port: closed"; fi
    done
  fi
  if [ $failures -eq 0 ]; then ok "All core services healthy"; else err "$failures service checks failed"; return 1; fi
}
# Orchestrate: deploy (unless --skip-deploy), then health checks, then the
# module-rebuild sentinel handling, then an endpoint summary.
main(){
if [ "$SKIP_DEPLOY" = false ]; then
deploy
# Wait for client-data completion if profile active
if printf '%s\n' "${PROFILES[@]}" | grep -q '^client-data$\|^client-data-bots$'; then
wait_log ac-client-data "Game data setup complete" || true
fi
# Give worldserver time to boot
sleep 10
fi
health_checks
handle_auto_rebuild
info "Endpoints: MySQL:64306, Auth:3784, World:8215, SOAP:7778, phpMyAdmin:8081, Keira3:4201"
}
main "$@"