From ea908dbbcf9fac4f7b3dfa5073ceee0587ddf5fe Mon Sep 17 00:00:00 2001 From: uprightbass360 Date: Sun, 16 Nov 2025 01:32:41 -0500 Subject: [PATCH] refactor module db importing --- README.md | 2 + config/module-manifest.json | 20 +- docker-compose.yml | 8 +- docs/SCRIPTS.md | 16 + scripts/bash/backup-status.sh | 421 ++++++++++++++++++ scripts/bash/db-health-check.sh | 389 ++++++++++++++++ scripts/bash/db-import-conditional.sh | 72 ++- scripts/bash/manage-modules.sh | 96 +++- scripts/bash/stage-modules.sh | 83 +++- scripts/bash/test-phase1-integration.sh | 315 +++++++++++++ scripts/bash/verify-sql-updates.sh | 348 +++++++++++++++ .../__pycache__/modules.cpython-312.pyc | Bin 0 -> 31659 bytes .../update_module_manifest.cpython-312.pyc | Bin 0 -> 15209 bytes scripts/python/modules.py | 83 ++++ scripts/python/update_module_manifest.py | 298 +++++++++++++ 15 files changed, 2120 insertions(+), 31 deletions(-) create mode 100755 scripts/bash/backup-status.sh create mode 100755 scripts/bash/db-health-check.sh create mode 100755 scripts/bash/test-phase1-integration.sh create mode 100755 scripts/bash/verify-sql-updates.sh create mode 100644 scripts/python/__pycache__/modules.cpython-312.pyc create mode 100644 scripts/python/__pycache__/update_module_manifest.cpython-312.pyc create mode 100755 scripts/python/update_module_manifest.py diff --git a/README.md b/README.md index c0fae5d..3662225 100644 --- a/README.md +++ b/README.md @@ -109,6 +109,8 @@ For complete spawn commands, coordinates, and functionality details, see **[docs For common workflows, management commands, and database operations, see **[docs/GETTING_STARTED.md](docs/GETTING_STARTED.md)**. +- Keep the module catalog current with `scripts/python/update_module_manifest.py` or trigger the scheduled **Sync Module Manifest** GitHub Action to auto-open a PR with the latest AzerothCore topic repos. 
+ --- ## Advanced Configuration diff --git a/config/module-manifest.json b/config/module-manifest.json index e608a91..f4f4d96 100644 --- a/config/module-manifest.json +++ b/config/module-manifest.json @@ -73,7 +73,9 @@ "config_cleanup": [ "mod_ahbot.conf*" ], - "category": "economy" + "category": "economy", + "status": "blocked", + "block_reason": "Linker error: Missing Addmod_ahbotScripts() function (use MODULE_LUA_AH_BOT instead)" }, { "key": "MODULE_AUTOBALANCE", @@ -343,6 +345,8 @@ "name": "mod-quest-count-level", "repo": "https://github.com/michaeldelago/mod-quest-count-level.git", "type": "cpp", + "status": "blocked", + "block_reason": "Uses removed ConfigMgr::GetBoolDefault API; fails to compile on modern cores", "post_install_hooks": [], "config_cleanup": [ "levelGrant.conf*" @@ -399,9 +403,11 @@ "name": "mod-challenge-modes", "repo": "https://github.com/ZhengPeiRu21/mod-challenge-modes.git", "type": "cpp", + "block_reason": "Compilation error: Override signature mismatch on OnGiveXP", "post_install_hooks": [], "description": "Implements keystone-style timed runs with leaderboards and scaling modifiers", - "category": "gameplay-enhancement" + "category": "gameplay-enhancement", + "status": "blocked" }, { "key": "MODULE_OLLAMA_CHAT", @@ -475,8 +481,10 @@ "repo": "https://github.com/azerothcore/mod-azerothshard.git", "type": "cpp", "post_install_hooks": [], + "block_reason": "Compilation error: Method name mismatch (getLevel vs GetLevel)", "description": "Bundles AzerothShard tweaks: utility NPCs, scripted events, and gameplay improvements", - "category": "content" + "category": "content", + "status": "blocked" }, { "key": "MODULE_WORGOBLIN", @@ -680,7 +688,9 @@ "copy-standard-lua" ], "description": "Enables multiple NPC merchants with database integration", - "category": "npc-service" + "category": "npc-service", + "status": "blocked", + "block_reason": "Linker error: Missing script loader function" }, { "key": "MODULE_TREASURE_CHEST_SYSTEM", @@ -1151,4 
+1161,4 @@ "category": "progression" } ] -} +} \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 8f49eff..ec35cb4 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -400,7 +400,7 @@ services: AC_LOGIN_DATABASE_INFO: "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_AUTH_NAME}" AC_WORLD_DATABASE_INFO: "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_WORLD_NAME}" AC_CHARACTER_DATABASE_INFO: "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_CHARACTERS_NAME}" - AC_UPDATES_ENABLE_DATABASES: "0" + AC_UPDATES_ENABLE_DATABASES: "7" AC_BIND_IP: "0.0.0.0" AC_DATA_DIR: "/azerothcore/data" AC_SOAP_PORT: "7878" @@ -489,7 +489,7 @@ services: condition: service_completed_successfully environment: AC_LOGIN_DATABASE_INFO: "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_AUTH_NAME}" - AC_UPDATES_ENABLE_DATABASES: "0" + AC_UPDATES_ENABLE_DATABASES: "7" AC_BIND_IP: "0.0.0.0" AC_LOG_LEVEL: "1" AC_LOGGER_ROOT_CONFIG: "1,Console" @@ -526,7 +526,7 @@ services: AC_LOGIN_DATABASE_INFO: "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_AUTH_NAME}" AC_WORLD_DATABASE_INFO: "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_WORLD_NAME}" AC_CHARACTER_DATABASE_INFO: "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_CHARACTERS_NAME}" - AC_UPDATES_ENABLE_DATABASES: "0" + AC_UPDATES_ENABLE_DATABASES: "7" AC_BIND_IP: "0.0.0.0" AC_DATA_DIR: "/azerothcore/data" AC_SOAP_PORT: "7878" @@ -579,7 +579,7 @@ services: AC_LOGIN_DATABASE_INFO: "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_AUTH_NAME}" AC_WORLD_DATABASE_INFO: "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_WORLD_NAME}" AC_CHARACTER_DATABASE_INFO: "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_CHARACTERS_NAME}" - 
AC_UPDATES_ENABLE_DATABASES: "0" + AC_UPDATES_ENABLE_DATABASES: "7" AC_BIND_IP: "0.0.0.0" AC_DATA_DIR: "/azerothcore/data" AC_SOAP_PORT: "7878" diff --git a/docs/SCRIPTS.md b/docs/SCRIPTS.md index c2645a4..124e514 100644 --- a/docs/SCRIPTS.md +++ b/docs/SCRIPTS.md @@ -182,6 +182,22 @@ Central module registry and management system: This centralized approach eliminates duplicate module definitions across scripts. +#### `scripts/python/update_module_manifest.py` - GitHub Topic Sync +Automates manifest population directly from the official AzerothCore GitHub topics. + +```bash +# Preview new modules across all default topics +python3 scripts/python/update_module_manifest.py --dry-run --log + +# Update config/module-manifest.json with latest repos (requires GITHUB_TOKEN) +GITHUB_TOKEN=ghp_yourtoken python3 scripts/python/update_module_manifest.py --refresh-existing +``` + +- Queries `azerothcore-module`, `azerothcore-lua`, `azerothcore-sql`, `azerothcore-tools`, and `azerothcore-module+ac-premium` +- Merges new repositories without touching existing customizations +- Optional `--refresh-existing` flag rehydrates names/descriptions from GitHub +- Designed for both local execution and the accompanying GitHub Action workflow + #### `scripts/bash/manage-modules-sql.sh` - Module Database Integration Executes module-specific SQL scripts for database schema updates. diff --git a/scripts/bash/backup-status.sh b/scripts/bash/backup-status.sh new file mode 100755 index 0000000..0571a5b --- /dev/null +++ b/scripts/bash/backup-status.sh @@ -0,0 +1,421 @@ +#!/bin/bash +# Backup Status Dashboard +# Displays comprehensive backup system status and statistics +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." 
&& pwd)" + +# Colors +BLUE='\033[0;34m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +CYAN='\033[0;36m' +BOLD='\033[1m' +NC='\033[0m' + +# Icons +ICON_BACKUP="๐Ÿ“ฆ" +ICON_TIME="๐Ÿ•" +ICON_SIZE="๐Ÿ’พ" +ICON_CHART="๐Ÿ“Š" +ICON_SUCCESS="โœ…" +ICON_WARNING="โš ๏ธ" +ICON_SCHEDULE="๐Ÿ“…" + +# Default values +SHOW_DETAILS=0 +SHOW_TRENDS=0 + +usage() { + cat <<'EOF' +Usage: ./backup-status.sh [options] + +Display backup system status and statistics. + +Options: + -d, --details Show detailed backup listing + -t, --trends Show size trends over time + -h, --help Show this help + +Examples: + ./backup-status.sh + ./backup-status.sh --details + ./backup-status.sh --details --trends + +EOF +} + +# Parse arguments +while [[ $# -gt 0 ]]; do + case "$1" in + -d|--details) SHOW_DETAILS=1; shift;; + -t|--trends) SHOW_TRENDS=1; shift;; + -h|--help) usage; exit 0;; + *) echo "Unknown option: $1"; usage; exit 1;; + esac +done + +# Load environment +if [ -f "$PROJECT_ROOT/.env" ]; then + set -a + # shellcheck disable=SC1091 + source "$PROJECT_ROOT/.env" + set +a +fi + +BACKUP_PATH="${BACKUP_PATH:-$PROJECT_ROOT/storage/backups}" +BACKUP_INTERVAL_MINUTES="${BACKUP_INTERVAL_MINUTES:-60}" +BACKUP_RETENTION_HOURS="${BACKUP_RETENTION_HOURS:-6}" +BACKUP_RETENTION_DAYS="${BACKUP_RETENTION_DAYS:-3}" +BACKUP_DAILY_TIME="${BACKUP_DAILY_TIME:-09}" + +# Format bytes to human readable +format_bytes() { + local bytes=$1 + if [ "$bytes" -lt 1024 ]; then + echo "${bytes}B" + elif [ "$bytes" -lt 1048576 ]; then + echo "$(awk "BEGIN {printf \"%.1f\", $bytes/1024}")KB" + elif [ "$bytes" -lt 1073741824 ]; then + echo "$(awk "BEGIN {printf \"%.1f\", $bytes/1048576}")MB" + else + echo "$(awk "BEGIN {printf \"%.2f\", $bytes/1073741824}")GB" + fi +} + +# Get directory size +get_dir_size() { + local dir="$1" + if [ -d "$dir" ]; then + du -sb "$dir" 2>/dev/null | cut -f1 + else + echo "0" + fi +} + +# Count backups in directory +count_backups() { + local dir="$1" + if [ -d "$dir" ]; then + find 
"$dir" -mindepth 1 -maxdepth 1 -type d 2>/dev/null | wc -l + else + echo "0" + fi +} + +# Get latest backup timestamp +get_latest_backup() { + local dir="$1" + if [ -d "$dir" ]; then + ls -1t "$dir" 2>/dev/null | head -n1 || echo "" + else + echo "" + fi +} + +# Parse timestamp from backup directory name +parse_timestamp() { + local backup_name="$1" + # Format: YYYYMMDD_HHMMSS or ExportBackup_YYYYMMDD_HHMMSS + local timestamp + if [[ "$backup_name" =~ ([0-9]{8})_([0-9]{6}) ]]; then + timestamp="${BASH_REMATCH[1]}_${BASH_REMATCH[2]}" + echo "$timestamp" + else + echo "" + fi +} + +# Calculate time ago from timestamp +time_ago() { + local timestamp="$1" + if [ -z "$timestamp" ]; then + echo "Unknown" + return + fi + + # Parse timestamp: YYYYMMDD_HHMMSS + local year="${timestamp:0:4}" + local month="${timestamp:4:2}" + local day="${timestamp:6:2}" + local hour="${timestamp:9:2}" + local minute="${timestamp:11:2}" + local second="${timestamp:13:2}" + + local backup_epoch + backup_epoch=$(date -d "$year-$month-$day $hour:$minute:$second" +%s 2>/dev/null || echo "0") + + if [ "$backup_epoch" = "0" ]; then + echo "Unknown" + return + fi + + local now_epoch + now_epoch=$(date +%s) + local diff=$((now_epoch - backup_epoch)) + + if [ "$diff" -lt 60 ]; then + echo "${diff} seconds ago" + elif [ "$diff" -lt 3600 ]; then + local minutes=$((diff / 60)) + echo "${minutes} minute(s) ago" + elif [ "$diff" -lt 86400 ]; then + local hours=$((diff / 3600)) + echo "${hours} hour(s) ago" + else + local days=$((diff / 86400)) + echo "${days} day(s) ago" + fi +} + +# Calculate next scheduled backup +next_backup_time() { + local interval_minutes="$1" + local now_epoch + now_epoch=$(date +%s) + + local next_epoch=$((now_epoch + (interval_minutes * 60))) + local in_minutes=$(((next_epoch - now_epoch) / 60)) + + if [ "$in_minutes" -lt 60 ]; then + echo "in ${in_minutes} minute(s)" + else + local in_hours=$((in_minutes / 60)) + local remaining_minutes=$((in_minutes % 60)) + echo "in 
${in_hours} hour(s) ${remaining_minutes} minute(s)" + fi +} + +# Calculate next daily backup +next_daily_backup() { + local daily_hour="$1" + local now_epoch + now_epoch=$(date +%s) + + local today_backup_epoch + today_backup_epoch=$(date -d "today ${daily_hour}:00:00" +%s) + + local next_epoch + if [ "$now_epoch" -lt "$today_backup_epoch" ]; then + next_epoch=$today_backup_epoch + else + next_epoch=$(date -d "tomorrow ${daily_hour}:00:00" +%s) + fi + + local diff=$((next_epoch - now_epoch)) + local hours=$((diff / 3600)) + local minutes=$(((diff % 3600) / 60)) + + echo "in ${hours} hour(s) ${minutes} minute(s)" +} + +# Show backup tier status +show_backup_tier() { + local tier_name="$1" + local tier_dir="$2" + local retention="$3" + + if [ ! -d "$tier_dir" ]; then + printf " ${ICON_WARNING} ${YELLOW}%s:${NC} No backups found\n" "$tier_name" + return + fi + + local count size latest + count=$(count_backups "$tier_dir") + size=$(get_dir_size "$tier_dir") + latest=$(get_latest_backup "$tier_dir") + + if [ "$count" = "0" ]; then + printf " ${ICON_WARNING} ${YELLOW}%s:${NC} No backups found\n" "$tier_name" + return + fi + + local latest_timestamp + latest_timestamp=$(parse_timestamp "$latest") + local ago + ago=$(time_ago "$latest_timestamp") + + printf " ${GREEN}${ICON_SUCCESS} %s:${NC} %s backup(s), %s total\n" "$tier_name" "$count" "$(format_bytes "$size")" + printf " ${ICON_TIME} Latest: %s (%s)\n" "$latest" "$ago" + printf " ${ICON_SCHEDULE} Retention: %s\n" "$retention" + + if [ "$SHOW_DETAILS" = "1" ]; then + printf " ${ICON_BACKUP} Available backups:\n" + local backup_list + backup_list=$(ls -1t "$tier_dir" 2>/dev/null || true) + while IFS= read -r backup; do + if [ -n "$backup" ]; then + local backup_size + backup_size=$(get_dir_size "$tier_dir/$backup") + local backup_timestamp + backup_timestamp=$(parse_timestamp "$backup") + local backup_ago + backup_ago=$(time_ago "$backup_timestamp") + printf " - %s: %s (%s)\n" "$backup" "$(format_bytes "$backup_size")" 
"$backup_ago" + fi + done <<< "$backup_list" + fi +} + +# Show size trends +show_trends() { + printf "${BOLD}${ICON_CHART} Backup Size Trends${NC}\n" + echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" + + local daily_dir="$BACKUP_PATH/daily" + if [ ! -d "$daily_dir" ]; then + printf " ${ICON_WARNING} No daily backups found for trend analysis\n\n" + return + fi + + # Get last 7 daily backups + local backup_list + backup_list=$(ls -1t "$daily_dir" 2>/dev/null | head -7 | tac) + + if [ -z "$backup_list" ]; then + printf " ${ICON_WARNING} Not enough backups for trend analysis\n\n" + return + fi + + # Find max size for scaling + local max_size=0 + while IFS= read -r backup; do + if [ -n "$backup" ]; then + local size + size=$(get_dir_size "$daily_dir/$backup") + if [ "$size" -gt "$max_size" ]; then + max_size=$size + fi + fi + done <<< "$backup_list" + + # Display trend chart + while IFS= read -r backup; do + if [ -n "$backup" ]; then + local size + size=$(get_dir_size "$daily_dir/$backup") + local timestamp + timestamp=$(parse_timestamp "$backup") + local date_str="${timestamp:0:4}-${timestamp:4:2}-${timestamp:6:2}" + + # Calculate bar length (max 30 chars) + local bar_length=0 + if [ "$max_size" -gt 0 ]; then + bar_length=$((size * 30 / max_size)) + fi + + # Create bar + local bar="" + for ((i=0; i/dev/null | wc -l) + if [ "$export_count" -gt 0 ]; then + local export_size=0 + while IFS= read -r export_dir; do + if [ -n "$export_dir" ]; then + local size + size=$(get_dir_size "$export_dir") + export_size=$((export_size + size)) + fi + done < <(find "$BACKUP_PATH" -maxdepth 1 -type d -name "ExportBackup_*" 2>/dev/null) + manual_size=$((manual_size + export_size)) + manual_count=$((manual_count + export_count)) + fi + fi + + if [ "$manual_count" -gt 0 ]; then + printf " ${GREEN}${ICON_SUCCESS} Manual/Export Backups:${NC} %s backup(s), %s total\n" 
"$manual_count" "$(format_bytes "$manual_size")" + fi + + echo + + # Show next scheduled backups + printf "${BOLD}${ICON_SCHEDULE} Backup Schedule${NC}\n" + echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" + printf " ${ICON_TIME} Hourly interval: every %s minutes\n" "$BACKUP_INTERVAL_MINUTES" + printf " ${ICON_TIME} Next hourly backup: %s\n" "$(next_backup_time "$BACKUP_INTERVAL_MINUTES")" + printf " ${ICON_TIME} Daily backup time: %s:00\n" "$BACKUP_DAILY_TIME" + printf " ${ICON_TIME} Next daily backup: %s\n" "$(next_daily_backup "$BACKUP_DAILY_TIME")" + echo + + # Calculate total storage + local total_size=0 + for tier_dir in "$BACKUP_PATH/hourly" "$BACKUP_PATH/daily"; do + if [ -d "$tier_dir" ]; then + local size + size=$(get_dir_size "$tier_dir") + total_size=$((total_size + size)) + fi + done + total_size=$((total_size + manual_size)) + + printf "${BOLD}${ICON_SIZE} Total Backup Storage: %s${NC}\n" "$(format_bytes "$total_size")" + echo + + # Show trends if requested + if [ "$SHOW_TRENDS" = "1" ]; then + show_trends + fi + + # Show backup configuration + if [ "$SHOW_DETAILS" = "1" ]; then + printf "${BOLD}โš™๏ธ Backup Configuration${NC}\n" + echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" + printf " Backup directory: %s\n" "$BACKUP_PATH" + printf " Hourly retention: %s hours\n" "$BACKUP_RETENTION_HOURS" + printf " Daily retention: %s days\n" "$BACKUP_RETENTION_DAYS" + printf " Interval: every %s minutes\n" "$BACKUP_INTERVAL_MINUTES" + printf " Daily backup time: %s:00\n" "$BACKUP_DAILY_TIME" + echo + fi + + printf "${GREEN}${ICON_SUCCESS} Backup status check complete!${NC}\n" + echo +} + +main "$@" diff --git a/scripts/bash/db-health-check.sh b/scripts/bash/db-health-check.sh new file mode 100755 index 0000000..3f983b0 --- /dev/null +++ 
b/scripts/bash/db-health-check.sh @@ -0,0 +1,389 @@ +#!/bin/bash +# Database Health Check Script +# Provides comprehensive health status of AzerothCore databases +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)" + +# Colors +BLUE='\033[0;34m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +CYAN='\033[0;36m' +BOLD='\033[1m' +NC='\033[0m' + +# Icons +ICON_SUCCESS="โœ…" +ICON_WARNING="โš ๏ธ" +ICON_ERROR="โŒ" +ICON_INFO="โ„น๏ธ" +ICON_DB="๐Ÿ—„๏ธ" +ICON_SIZE="๐Ÿ’พ" +ICON_TIME="๐Ÿ•" +ICON_MODULE="๐Ÿ“ฆ" +ICON_UPDATE="๐Ÿ”„" + +# Default values +VERBOSE=0 +SHOW_PENDING=0 +SHOW_MODULES=1 +CONTAINER_NAME="ac-mysql" + +usage() { + cat <<'EOF' +Usage: ./db-health-check.sh [options] + +Check the health status of AzerothCore databases. + +Options: + -v, --verbose Show detailed information + -p, --pending Show pending updates + -m, --no-modules Hide module update information + -c, --container NAME MySQL container name (default: ac-mysql) + -h, --help Show this help + +Examples: + ./db-health-check.sh + ./db-health-check.sh --verbose --pending + ./db-health-check.sh --container ac-mysql-custom + +EOF +} + +# Parse arguments +while [[ $# -gt 0 ]]; do + case "$1" in + -v|--verbose) VERBOSE=1; shift;; + -p|--pending) SHOW_PENDING=1; shift;; + -m|--no-modules) SHOW_MODULES=0; shift;; + -c|--container) CONTAINER_NAME="$2"; shift 2;; + -h|--help) usage; exit 0;; + *) echo "Unknown option: $1"; usage; exit 1;; + esac +done + +# Load environment +if [ -f "$PROJECT_ROOT/.env" ]; then + set -a + # shellcheck disable=SC1091 + source "$PROJECT_ROOT/.env" + set +a +fi + +MYSQL_HOST="${MYSQL_HOST:-ac-mysql}" +MYSQL_PORT="${MYSQL_PORT:-3306}" +MYSQL_USER="${MYSQL_USER:-root}" +MYSQL_ROOT_PASSWORD="${MYSQL_ROOT_PASSWORD:-}" +DB_AUTH_NAME="${DB_AUTH_NAME:-acore_auth}" +DB_WORLD_NAME="${DB_WORLD_NAME:-acore_world}" +DB_CHARACTERS_NAME="${DB_CHARACTERS_NAME:-acore_characters}" 
+DB_PLAYERBOTS_NAME="${DB_PLAYERBOTS_NAME:-acore_playerbots}" + +# MySQL query helper +mysql_query() { + local database="${1:-}" + local query="$2" + + if [ -z "$MYSQL_ROOT_PASSWORD" ]; then + echo "Error: MYSQL_ROOT_PASSWORD not set" >&2 + return 1 + fi + + if command -v docker >/dev/null 2>&1; then + if [ -n "$database" ]; then + docker exec "$CONTAINER_NAME" mysql -h"$MYSQL_HOST" -P"$MYSQL_PORT" -u"$MYSQL_USER" -p"$MYSQL_ROOT_PASSWORD" "$database" -N -B -e "$query" 2>/dev/null + else + docker exec "$CONTAINER_NAME" mysql -h"$MYSQL_HOST" -P"$MYSQL_PORT" -u"$MYSQL_USER" -p"$MYSQL_ROOT_PASSWORD" -N -B -e "$query" 2>/dev/null + fi + else + if [ -n "$database" ]; then + mysql -h"$MYSQL_HOST" -P"$MYSQL_PORT" -u"$MYSQL_USER" -p"$MYSQL_ROOT_PASSWORD" "$database" -N -B -e "$query" 2>/dev/null + else + mysql -h"$MYSQL_HOST" -P"$MYSQL_PORT" -u"$MYSQL_USER" -p"$MYSQL_ROOT_PASSWORD" -N -B -e "$query" 2>/dev/null + fi + fi +} + +# Format bytes to human readable +format_bytes() { + local bytes=$1 + if [ "$bytes" -lt 1024 ]; then + echo "${bytes}B" + elif [ "$bytes" -lt 1048576 ]; then + echo "$(awk "BEGIN {printf \"%.1f\", $bytes/1024}")KB" + elif [ "$bytes" -lt 1073741824 ]; then + echo "$(awk "BEGIN {printf \"%.1f\", $bytes/1048576}")MB" + else + echo "$(awk "BEGIN {printf \"%.2f\", $bytes/1073741824}")GB" + fi +} + +# Check if database exists +db_exists() { + local db_name="$1" + local count + count=$(mysql_query "" "SELECT COUNT(*) FROM information_schema.SCHEMATA WHERE SCHEMA_NAME='$db_name'" 2>/dev/null || echo "0") + [ "$count" = "1" ] +} + +# Get database size +get_db_size() { + local db_name="$1" + mysql_query "" "SELECT IFNULL(SUM(data_length + index_length), 0) FROM information_schema.TABLES WHERE table_schema='$db_name'" 2>/dev/null || echo "0" +} + +# Get update count +get_update_count() { + local db_name="$1" + local state="${2:-}" + + if [ -n "$state" ]; then + mysql_query "$db_name" "SELECT COUNT(*) FROM updates WHERE state='$state'" 2>/dev/null || echo "0" + 
else + mysql_query "$db_name" "SELECT COUNT(*) FROM updates" 2>/dev/null || echo "0" + fi +} + +# Get last update timestamp +get_last_update() { + local db_name="$1" + mysql_query "$db_name" "SELECT IFNULL(MAX(timestamp), 'Never') FROM updates" 2>/dev/null || echo "Never" +} + +# Get table count +get_table_count() { + local db_name="$1" + mysql_query "" "SELECT COUNT(*) FROM information_schema.TABLES WHERE table_schema='$db_name'" 2>/dev/null || echo "0" +} + +# Get character count +get_character_count() { + mysql_query "$DB_CHARACTERS_NAME" "SELECT COUNT(*) FROM characters" 2>/dev/null || echo "0" +} + +# Get active players (logged in last 24 hours) +get_active_players() { + mysql_query "$DB_CHARACTERS_NAME" "SELECT COUNT(*) FROM characters WHERE logout_time > UNIX_TIMESTAMP(NOW() - INTERVAL 1 DAY)" 2>/dev/null || echo "0" +} + +# Get account count +get_account_count() { + mysql_query "$DB_AUTH_NAME" "SELECT COUNT(*) FROM account" 2>/dev/null || echo "0" +} + +# Get pending updates +get_pending_updates() { + local db_name="$1" + mysql_query "$db_name" "SELECT name FROM updates WHERE state='PENDING' ORDER BY name" 2>/dev/null || true +} + +# Check database health +check_database() { + local db_name="$1" + local display_name="$2" + + if ! 
db_exists "$db_name"; then + printf " ${RED}${ICON_ERROR} %s (%s)${NC}\n" "$display_name" "$db_name" + printf " ${RED}Database does not exist${NC}\n" + return 1 + fi + + printf " ${GREEN}${ICON_SUCCESS} %s (%s)${NC}\n" "$display_name" "$db_name" + + local update_count module_count last_update db_size table_count + update_count=$(get_update_count "$db_name" "RELEASED") + module_count=$(get_update_count "$db_name" "MODULE") + last_update=$(get_last_update "$db_name") + db_size=$(get_db_size "$db_name") + table_count=$(get_table_count "$db_name") + + printf " ${ICON_UPDATE} Updates: %s applied" "$update_count" + if [ "$module_count" != "0" ] && [ "$SHOW_MODULES" = "1" ]; then + printf " (%s module)" "$module_count" + fi + printf "\n" + + printf " ${ICON_TIME} Last update: %s\n" "$last_update" + printf " ${ICON_SIZE} Size: %s (%s tables)\n" "$(format_bytes "$db_size")" "$table_count" + + if [ "$VERBOSE" = "1" ]; then + local custom_count archived_count + custom_count=$(get_update_count "$db_name" "CUSTOM") + archived_count=$(get_update_count "$db_name" "ARCHIVED") + + if [ "$custom_count" != "0" ]; then + printf " ${ICON_INFO} Custom updates: %s\n" "$custom_count" + fi + if [ "$archived_count" != "0" ]; then + printf " ${ICON_INFO} Archived updates: %s\n" "$archived_count" + fi + fi + + # Show pending updates if requested + if [ "$SHOW_PENDING" = "1" ]; then + local pending_updates + pending_updates=$(get_pending_updates "$db_name") + if [ -n "$pending_updates" ]; then + printf " ${YELLOW}${ICON_WARNING} Pending updates:${NC}\n" + while IFS= read -r update; do + printf " - %s\n" "$update" + done <<< "$pending_updates" + fi + fi + + echo +} + +# Show module updates summary +show_module_updates() { + if [ "$SHOW_MODULES" = "0" ]; then + return + fi + + printf "${BOLD}${ICON_MODULE} Module Updates${NC}\n" + echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" + + # 
Get module updates from world database (most modules update world DB) + local module_updates + module_updates=$(mysql_query "$DB_WORLD_NAME" "SELECT SUBSTRING_INDEX(name, '_', 1) as module, COUNT(*) as count FROM updates WHERE state='MODULE' GROUP BY module ORDER BY module" 2>/dev/null || echo "") + + if [ -z "$module_updates" ]; then + printf " ${ICON_INFO} No module updates detected\n\n" + return + fi + + while IFS=$'\t' read -r module count; do + printf " ${GREEN}${ICON_SUCCESS}${NC} %s: %s update(s)\n" "$module" "$count" + done <<< "$module_updates" + echo +} + +# Get backup information +get_backup_info() { + local backup_dir="$PROJECT_ROOT/storage/backups" + + if [ ! -d "$backup_dir" ]; then + printf " ${ICON_INFO} No backups directory found\n" + return + fi + + # Check for latest backup + local latest_hourly latest_daily + if [ -d "$backup_dir/hourly" ]; then + latest_hourly=$(ls -1t "$backup_dir/hourly" 2>/dev/null | head -n1 || echo "") + fi + if [ -d "$backup_dir/daily" ]; then + latest_daily=$(ls -1t "$backup_dir/daily" 2>/dev/null | head -n1 || echo "") + fi + + if [ -n "$latest_hourly" ]; then + # Calculate time ago + local backup_timestamp="${latest_hourly:0:8}_${latest_hourly:9:6}" + local backup_epoch + backup_epoch=$(date -d "${backup_timestamp:0:4}-${backup_timestamp:4:2}-${backup_timestamp:6:2} ${backup_timestamp:9:2}:${backup_timestamp:11:2}:${backup_timestamp:13:2}" +%s 2>/dev/null || echo "0") + local now_epoch + now_epoch=$(date +%s) + local diff=$((now_epoch - backup_epoch)) + local hours=$((diff / 3600)) + local minutes=$(((diff % 3600) / 60)) + + if [ "$hours" -gt 0 ]; then + printf " ${ICON_TIME} Last hourly backup: %s hours ago\n" "$hours" + else + printf " ${ICON_TIME} Last hourly backup: %s minutes ago\n" "$minutes" + fi + fi + + if [ -n "$latest_daily" ] && [ "$latest_daily" != "$latest_hourly" ]; then + local backup_timestamp="${latest_daily:0:8}_${latest_daily:9:6}" + local backup_epoch + backup_epoch=$(date -d 
"${backup_timestamp:0:4}-${backup_timestamp:4:2}-${backup_timestamp:6:2} ${backup_timestamp:9:2}:${backup_timestamp:11:2}:${backup_timestamp:13:2}" +%s 2>/dev/null || echo "0") + local now_epoch + now_epoch=$(date +%s) + local diff=$((now_epoch - backup_epoch)) + local days=$((diff / 86400)) + + printf " ${ICON_TIME} Last daily backup: %s days ago\n" "$days" + fi +} + +# Main health check +main() { + echo + printf "${BOLD}${BLUE}${ICON_DB} AZEROTHCORE DATABASE HEALTH CHECK${NC}\n" + echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" + echo + + # Test MySQL connection + if ! mysql_query "" "SELECT 1" >/dev/null 2>&1; then + printf "${RED}${ICON_ERROR} Cannot connect to MySQL server${NC}\n" + printf " Host: %s:%s\n" "$MYSQL_HOST" "$MYSQL_PORT" + printf " User: %s\n" "$MYSQL_USER" + printf " Container: %s\n\n" "$CONTAINER_NAME" + exit 1 + fi + + printf "${BOLD}${ICON_DB} Database Status${NC}\n" + echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" + echo + + # Check each database + check_database "$DB_AUTH_NAME" "Auth DB" + check_database "$DB_WORLD_NAME" "World DB" + check_database "$DB_CHARACTERS_NAME" "Characters DB" + + # Optional: Check playerbots database + if db_exists "$DB_PLAYERBOTS_NAME"; then + check_database "$DB_PLAYERBOTS_NAME" "Playerbots DB" + fi + + # Show character/account statistics + printf "${BOLD}${CYAN}๐Ÿ“Š Server Statistics${NC}\n" + echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" + + local account_count character_count active_count + account_count=$(get_account_count) + character_count=$(get_character_count) + active_count=$(get_active_players) + + printf " ${ICON_INFO} Accounts: %s\n" "$account_count" + printf " 
${ICON_INFO} Characters: %s\n" "$character_count" + printf " ${ICON_INFO} Active (24h): %s\n" "$active_count" + echo + + # Show module updates + show_module_updates + + # Show backup information + printf "${BOLD}${ICON_SIZE} Backup Information${NC}\n" + echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" + get_backup_info + echo + + # Calculate total database size + local total_size=0 + for db in "$DB_AUTH_NAME" "$DB_WORLD_NAME" "$DB_CHARACTERS_NAME"; do + if db_exists "$db"; then + local size + size=$(get_db_size "$db") + total_size=$((total_size + size)) + fi + done + + if db_exists "$DB_PLAYERBOTS_NAME"; then + local size + size=$(get_db_size "$DB_PLAYERBOTS_NAME") + total_size=$((total_size + size)) + fi + + printf "${BOLD}๐Ÿ’พ Total Database Storage: %s${NC}\n" "$(format_bytes "$total_size")" + echo + + printf "${GREEN}${ICON_SUCCESS} Health check complete!${NC}\n" + echo +} + +main "$@" diff --git a/scripts/bash/db-import-conditional.sh b/scripts/bash/db-import-conditional.sh index 137abac..4a1b8c9 100755 --- a/scripts/bash/db-import-conditional.sh +++ b/scripts/bash/db-import-conditional.sh @@ -280,9 +280,78 @@ if [ -n "$backup_path" ]; then return $([ "$restore_success" = true ] && echo 0 || echo 1) } + verify_and_update_restored_databases() { + echo "๐Ÿ” Verifying restored database integrity..." + + # Check if dbimport is available + if [ ! -f "/azerothcore/env/dist/bin/dbimport" ]; then + echo "โš ๏ธ dbimport not available, skipping verification" + return 0 + fi + + # Create dbimport config for verification + echo "๐Ÿ“ Creating dbimport configuration for verification..." 
+ mkdir -p /azerothcore/env/dist/etc + TEMP_DIR="/azerothcore/env/dist/temp" + mkdir -p "$TEMP_DIR" + MYSQL_EXECUTABLE="$(command -v mysql || echo '/usr/bin/mysql')" + cat > /azerothcore/env/dist/etc/dbimport.conf </dev/null 2>&1; then + echo "โš ๏ธ Critical table missing: ${db_name}.${table}" + missing_tables=$((missing_tables + 1)) + fi + done + + if [ "$missing_tables" -gt 0 ]; then + echo "โš ๏ธ ${missing_tables} critical tables missing after restore" + return 1 + fi + + echo "โœ… All critical tables verified" + return 0 + } + if restore_backup "$backup_path"; then echo "$(date): Backup successfully restored from $backup_path" > "$RESTORE_SUCCESS_MARKER" echo "๐ŸŽ‰ Backup restoration completed successfully!" + + # Verify and apply missing updates + verify_and_update_restored_databases + exit 0 else echo "$(date): Backup restoration failed - proceeding with fresh setup" > "$RESTORE_FAILED_MARKER" @@ -311,7 +380,8 @@ cat > /azerothcore/env/dist/etc/dbimport.conf </dev/null 2>&1; then - echo 'Executing module SQL scripts...' - if execute_module_sql_scripts; then - echo 'SQL execution complete.' - else - echo 'โš ๏ธ Module SQL scripts reported errors' - SQL_EXECUTION_FAILED=1 - fi - else - info "SQL helper did not expose execute_module_sql_scripts; skipping module SQL execution" +stage_module_sql_files(){ + # Stage SQL files to AzerothCore's native update directory structure + # This replaces manual SQL execution with AzerothCore's built-in updater + + local staging_dir="${MODULE_STAGING_DIR:-$MODULES_ROOT}" + local sql_manifest="$STATE_DIR/.sql-manifest.json" + + if [ ! 
-f "$sql_manifest" ]; then + info "No SQL manifest found - no SQL files to stage" + return 0 fi + + # Check if manifest has any modules with SQL + local module_count + module_count=$(python3 -c "import json; data=json.load(open('$sql_manifest')); print(len(data.get('modules', [])))" 2>/dev/null || echo "0") + + if [ "$module_count" = "0" ]; then + info "No modules with SQL files to stage" + return 0 + fi + + info "Staging SQL for $module_count module(s)" + + # Read each module from manifest and stage its SQL + local modules_json + modules_json=$(python3 -c "import json; data=json.load(open('$sql_manifest')); print('\n'.join(m['name'] for m in data['modules']))" 2>/dev/null || echo "") + + if [ -z "$modules_json" ]; then + warn "Failed to parse SQL manifest" + return 1 + fi + + local staged_count=0 + while IFS= read -r module_name; do + if [ -z "$module_name" ]; then + continue + fi + + local module_path="$staging_dir/$module_name" + local acore_modules="/azerothcore/modules/$module_name" + + if [ ! -d "$module_path" ]; then + warn "Module path not found: $module_path" + continue + fi + + # Call stage-module-sql.sh for this module + local stage_script="${PROJECT_ROOT}/scripts/bash/stage-module-sql.sh" + if [ ! 
-f "$stage_script" ]; then + # Try container location + stage_script="/scripts/bash/stage-module-sql.sh" + fi + + if [ -f "$stage_script" ]; then + if "$stage_script" \ + --module-name "$module_name" \ + --module-path "$module_path" \ + --acore-path "$acore_modules"; then + ((staged_count++)) + fi + else + warn "SQL staging script not found: $stage_script" + fi + done <<< "$modules_json" + + if [ "$staged_count" -gt 0 ]; then + ok "Staged SQL for $staged_count module(s)" + info "SQL will be applied by AzerothCore's updater on next server startup" + fi + + return 0 +} + +execute_module_sql(){ + # Legacy function - now calls staging instead of direct execution + SQL_EXECUTION_FAILED=0 + stage_module_sql_files || SQL_EXECUTION_FAILED=1 } track_module_state(){ @@ -591,13 +655,11 @@ main(){ remove_disabled_modules install_enabled_modules manage_configuration_files - info "SQL execution gate: MODULES_SKIP_SQL=${MODULES_SKIP_SQL:-0}" + info "SQL staging gate: MODULES_SKIP_SQL=${MODULES_SKIP_SQL:-0}" if [ "${MODULES_SKIP_SQL:-0}" = "1" ]; then - info "Skipping module SQL execution (MODULES_SKIP_SQL=1)" + info "Skipping module SQL staging (MODULES_SKIP_SQL=1)" else - info "Initiating module SQL helper" - load_sql_helper - info "SQL helper loaded from ${SQL_HELPER_PATH:-unknown}" + info "Staging module SQL files for AzerothCore updater" execute_module_sql fi track_module_state diff --git a/scripts/bash/stage-modules.sh b/scripts/bash/stage-modules.sh index 2ab1d92..4d1b4d8 100755 --- a/scripts/bash/stage-modules.sh +++ b/scripts/bash/stage-modules.sh @@ -369,10 +369,85 @@ case "$TARGET_PROFILE" in modules) PROFILE_ARGS+=(--profile client-data) ;; esac -# Start the target profile -show_staging_step "Realm Activation" "Bringing services online" -echo "๐ŸŸข Starting services-$TARGET_PROFILE profile..." 
-docker compose "${PROFILE_ARGS[@]}" up -d +# Stage module SQL to core updates directory (after containers start) +stage_module_sql_to_core() { + show_staging_step "Module SQL Staging" "Preparing module database updates" + + # Start containers first to get access to worldserver container + show_staging_step "Realm Activation" "Bringing services online" + echo "๐ŸŸข Starting services-$TARGET_PROFILE profile..." + docker compose "${PROFILE_ARGS[@]}" up -d + + # Wait for worldserver container to be running + echo "โณ Waiting for worldserver container..." + local max_wait=60 + local waited=0 + while ! docker ps --format '{{.Names}}' | grep -q "ac-worldserver" && [ $waited -lt $max_wait ]; do + sleep 2 + waited=$((waited + 2)) + done + + if ! docker ps --format '{{.Names}}' | grep -q "ac-worldserver"; then + echo "โš ๏ธ Worldserver container not found, skipping module SQL staging" + return 0 + fi + + echo "๐Ÿ“ฆ Staging module SQL files to core updates directory..." + + # Create core updates directories inside container + docker exec ac-worldserver bash -c " + mkdir -p /azerothcore/data/sql/updates/db_world \ + /azerothcore/data/sql/updates/db_characters \ + /azerothcore/data/sql/updates/db_auth + " 2>/dev/null || true + + # Stage SQL from all modules + local staged_count=0 + local timestamp=$(date +"%Y_%m_%d_%H%M%S") + + # Find all modules with SQL files + for db_type in db-world db-characters db-auth; do + local core_dir="" + case "$db_type" in + db-world) core_dir="db_world" ;; + db-characters) core_dir="db_characters" ;; + db-auth) core_dir="db_auth" ;; + esac + + # Copy SQL files from each module + docker exec ac-worldserver bash -c " + counter=0 + for module_dir in /azerothcore/modules/*/data/sql/$db_type; do + if [ -d \"\$module_dir\" ]; then + module_name=\$(basename \$(dirname \$(dirname \$module_dir))) + for sql_file in \"\$module_dir\"/*.sql; do + if [ -f \"\$sql_file\" ]; then + base_name=\$(basename \"\$sql_file\" .sql) + 
target_name=\"${timestamp}_\${counter}_MODULE_\${module_name}_\${base_name}.sql\" + cp \"\$sql_file\" \"/azerothcore/data/sql/updates/$core_dir/\$target_name\" + echo \" โœ“ Staged \$module_name/$db_type/\$(basename \$sql_file)\" + counter=\$((counter + 1)) + fi + done + fi + done + echo \$counter + " 2>/dev/null | tee /tmp/stage-sql-output.txt || true + + local count=$(tail -1 /tmp/stage-sql-output.txt 2>/dev/null || echo "0") + staged_count=$((staged_count + count)) + done + + if [ "$staged_count" -gt 0 ]; then + echo "โœ… Staged $staged_count module SQL files to core updates directory" + echo "๐Ÿ”„ Restart worldserver to apply: docker restart ac-worldserver" + else + echo "โ„น๏ธ No module SQL files found to stage" + fi +} + +# Stage module SQL (this will also start the containers) +stage_module_sql_to_core printf '\n%b\n' "${GREEN}โš”๏ธ Realm staging completed successfully! โš”๏ธ${NC}" printf '%b\n' "${GREEN}๐Ÿฐ Profile: services-$TARGET_PROFILE${NC}" diff --git a/scripts/bash/test-phase1-integration.sh b/scripts/bash/test-phase1-integration.sh new file mode 100755 index 0000000..8c5bba5 --- /dev/null +++ b/scripts/bash/test-phase1-integration.sh @@ -0,0 +1,315 @@ +#!/bin/bash +# Phase 1 Integration Test Script +# Tests the complete Phase 1 implementation using build and deploy workflows + +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." 
&& pwd)" + +# Colors +BLUE='\033[0;34m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +BOLD='\033[1m' +NC='\033[0m' + +# Icons +ICON_SUCCESS="โœ…" +ICON_WARNING="โš ๏ธ" +ICON_ERROR="โŒ" +ICON_INFO="โ„น๏ธ" +ICON_TEST="๐Ÿงช" + +# Counters +TESTS_TOTAL=0 +TESTS_PASSED=0 +TESTS_FAILED=0 + +info() { + echo -e "${BLUE}${ICON_INFO}${NC} $*" +} + +ok() { + echo -e "${GREEN}${ICON_SUCCESS}${NC} $*" + ((TESTS_PASSED++)) +} + +warn() { + echo -e "${YELLOW}${ICON_WARNING}${NC} $*" +} + +err() { + echo -e "${RED}${ICON_ERROR}${NC} $*" + ((TESTS_FAILED++)) +} + +test_header() { + ((TESTS_TOTAL++)) + echo "" + echo -e "${BOLD}${ICON_TEST} Test $TESTS_TOTAL: $*${NC}" + echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" +} + +section_header() { + echo "" + echo "" + echo -e "${BOLD}${BLUE}โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•${NC}" + echo -e "${BOLD}${BLUE} $*${NC}" + echo -e "${BOLD}${BLUE}โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•${NC}" + echo "" +} + +# Change to project root +cd "$PROJECT_ROOT" + +section_header "Phase 1 Integration Test Suite" + +info "Project root: $PROJECT_ROOT" +info "Test started: $(date)" + +# Test 1: Verify .env exists +test_header "Environment Configuration Check" +if [ -f .env ]; then + ok ".env file exists" + + # Count enabled modules + enabled_count=$(grep -c "^MODULE_.*=1" .env || echo "0") + info "Enabled modules: $enabled_count" + + # Check for playerbots + if grep -q "^MODULE_PLAYERBOTS=1" .env; then + info "Playerbots module enabled" + fi +else + err ".env file not found" + echo "Please run ./setup.sh first" + exit 1 +fi + +# Test 2: Module manifest validation +test_header 
"Module Manifest Validation" +if [ -f config/module-manifest.json ]; then + ok "Module manifest exists" + + # Validate JSON + if python3 -m json.tool config/module-manifest.json >/dev/null 2>&1; then + ok "Module manifest is valid JSON" + else + err "Module manifest has invalid JSON" + fi +else + err "Module manifest not found" + exit 1 +fi + +# Test 3: Generate module state with SQL discovery +test_header "Module State Generation (SQL Discovery)" +info "Running: python3 scripts/python/modules.py generate" + +if python3 scripts/python/modules.py \ + --env-path .env \ + --manifest config/module-manifest.json \ + generate --output-dir local-storage/modules > /tmp/phase1-modules-generate.log 2>&1; then + ok "Module state generation successful" +else + # Check if it's just warnings + if grep -q "warnings detected" /tmp/phase1-modules-generate.log 2>/dev/null; then + ok "Module state generation completed with warnings" + else + err "Module state generation failed" + fi +fi + +# Test 4: Verify SQL manifest created +test_header "SQL Manifest Verification" +if [ -f local-storage/modules/.sql-manifest.json ]; then + ok "SQL manifest created: local-storage/modules/.sql-manifest.json" + + # Check manifest structure + module_count=$(python3 -c "import json; data=json.load(open('local-storage/modules/.sql-manifest.json')); print(len(data.get('modules', [])))" 2>/dev/null || echo "0") + info "Modules with SQL: $module_count" + + if [ "$module_count" -gt 0 ]; then + ok "SQL manifest contains $module_count module(s)" + + # Show first module + info "Sample module SQL info:" + python3 -c "import json; data=json.load(open('local-storage/modules/.sql-manifest.json')); m=data['modules'][0] if data['modules'] else {}; print(f\" Name: {m.get('name', 'N/A')}\n SQL files: {len(m.get('sql_files', {}))}\") " 2>/dev/null || true + else + warn "No modules with SQL files (expected if modules not yet staged)" + fi +else + err "SQL manifest not created" +fi + +# Test 5: Verify modules.env created 
+test_header "Module Environment File Check" +if [ -f local-storage/modules/modules.env ]; then + ok "modules.env created" + + # Check for key exports + if grep -q "MODULES_ENABLED=" local-storage/modules/modules.env; then + ok "MODULES_ENABLED variable present" + fi + + if grep -q "MODULES_REQUIRES_CUSTOM_BUILD=" local-storage/modules/modules.env; then + ok "Build requirement flags present" + + # Check if build required + source local-storage/modules/modules.env + if [ "${MODULES_REQUIRES_CUSTOM_BUILD:-0}" = "1" ]; then + info "Custom build required (C++ modules enabled)" + else + info "Standard build sufficient (no C++ modules)" + fi + fi +else + err "modules.env not created" +fi + +# Test 6: Check build requirement +test_header "Build Requirement Check" +if [ -f local-storage/modules/modules.env ]; then + source local-storage/modules/modules.env + + info "MODULES_REQUIRES_CUSTOM_BUILD=${MODULES_REQUIRES_CUSTOM_BUILD:-0}" + info "MODULES_REQUIRES_PLAYERBOT_SOURCE=${MODULES_REQUIRES_PLAYERBOT_SOURCE:-0}" + + if [ "${MODULES_REQUIRES_CUSTOM_BUILD:-0}" = "1" ]; then + ok "Build system correctly detected C++ modules" + BUILD_REQUIRED=1 + else + ok "Build system correctly detected no C++ modules" + BUILD_REQUIRED=0 + fi +else + warn "Cannot determine build requirements" + BUILD_REQUIRED=0 +fi + +# Test 7: Verify new scripts exist and are executable +test_header "New Script Verification" +scripts=( + "scripts/bash/stage-module-sql.sh" + "scripts/bash/verify-sql-updates.sh" + "scripts/bash/backup-status.sh" + "scripts/bash/db-health-check.sh" +) + +for script in "${scripts[@]}"; do + if [ -f "$script" ]; then + if [ -x "$script" ]; then + ok "$(basename "$script") - exists and executable" + else + warn "$(basename "$script") - exists but not executable" + chmod +x "$script" + ok "Fixed permissions for $(basename "$script")" + fi + else + err "$(basename "$script") - not found" + fi +done + +# Test 8: Test backup-status.sh (without running containers) +test_header 
"Backup Status Script Test" +if ./scripts/bash/backup-status.sh 2>&1 | head -10 | grep -q "BACKUP STATUS"; then + ok "backup-status.sh executes successfully" +else + err "backup-status.sh failed to execute" +fi + +# Test 9: Test db-health-check.sh help +test_header "Database Health Check Script Test" +if ./scripts/bash/db-health-check.sh --help | grep -q "Check the health status"; then + ok "db-health-check.sh help working" +else + err "db-health-check.sh help failed" +fi + +# Test 10: Check modified scripts for new functionality +test_header "Modified Script Verification" + +# Check manage-modules.sh has staging function +if grep -q "stage_module_sql_files()" scripts/bash/manage-modules.sh; then + ok "manage-modules.sh contains SQL staging function" +else + err "manage-modules.sh missing SQL staging function" +fi + +# Check db-import-conditional.sh has playerbots support +if grep -q "PlayerbotsDatabaseInfo" scripts/bash/db-import-conditional.sh; then + ok "db-import-conditional.sh has playerbots database support" +else + err "db-import-conditional.sh missing playerbots support" +fi + +if grep -q "Updates.EnableDatabases = 15" scripts/bash/db-import-conditional.sh; then + ok "db-import-conditional.sh has correct EnableDatabases value (15)" +else + warn "db-import-conditional.sh may have incorrect EnableDatabases value" +fi + +# Check for post-restore verification +if grep -q "verify_and_update_restored_databases" scripts/bash/db-import-conditional.sh; then + ok "db-import-conditional.sh has post-restore verification" +else + err "db-import-conditional.sh missing post-restore verification" +fi + +# Test 11: Docker Compose configuration check +test_header "Docker Compose Configuration Check" +if [ -f docker-compose.yml ]; then + ok "docker-compose.yml exists" + + # Check for required services + if grep -q "ac-mysql:" docker-compose.yml; then + ok "MySQL service configured" + fi + + if grep -q "ac-worldserver:" docker-compose.yml; then + ok "Worldserver service 
configured" + fi +else + err "docker-compose.yml not found" +fi + +# Test Summary +section_header "Test Summary" + +echo "" +echo -e "${BOLD}Tests Executed: $TESTS_TOTAL${NC}" +echo -e "${GREEN}${BOLD}Passed: $TESTS_PASSED${NC}" +if [ $TESTS_FAILED -gt 0 ]; then + echo -e "${RED}${BOLD}Failed: $TESTS_FAILED${NC}" +else + echo -e "${GREEN}${BOLD}Failed: $TESTS_FAILED${NC}" +fi +echo "" + +# Calculate success rate +if [ $TESTS_TOTAL -gt 0 ]; then + success_rate=$((TESTS_PASSED * 100 / TESTS_TOTAL)) + echo -e "${BOLD}Success Rate: ${success_rate}%${NC}" +fi + +echo "" + +if [ $TESTS_FAILED -eq 0 ]; then + echo -e "${GREEN}${BOLD}${ICON_SUCCESS} ALL TESTS PASSED${NC}" + echo "" + echo "Phase 1 implementation is working correctly!" + echo "" + echo "Next steps:" + echo " 1. Run './build.sh' if C++ modules are enabled" + echo " 2. Run './deploy.sh' to start containers" + echo " 3. Verify SQL staging with running containers" + echo " 4. Check database health with db-health-check.sh" + exit 0 +else + echo -e "${RED}${BOLD}${ICON_ERROR} SOME TESTS FAILED${NC}" + echo "" + echo "Please review the failures above before proceeding." + exit 1 +fi diff --git a/scripts/bash/verify-sql-updates.sh b/scripts/bash/verify-sql-updates.sh new file mode 100755 index 0000000..92b6e47 --- /dev/null +++ b/scripts/bash/verify-sql-updates.sh @@ -0,0 +1,348 @@ +#!/bin/bash +# Verify SQL Updates +# Checks that SQL updates have been applied via the updates table +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." 
&& pwd)" + +# Colors +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +BLUE='\033[0;34m' +CYAN='\033[0;36m' +BOLD='\033[1m' +NC='\033[0m' + +# Icons +ICON_SUCCESS="โœ…" +ICON_WARNING="โš ๏ธ" +ICON_ERROR="โŒ" +ICON_INFO="โ„น๏ธ" + +# Default values +MODULE_NAME="" +DATABASE_NAME="" +SHOW_ALL=0 +CHECK_HASH=0 +CONTAINER_NAME="ac-mysql" + +usage() { + cat <<'EOF' +Usage: ./verify-sql-updates.sh [options] + +Verify that SQL updates have been applied via AzerothCore's updates table. + +Options: + --module NAME Check specific module + --database NAME Check specific database (auth/world/characters) + --all Show all module updates + --check-hash Verify file hashes match database + --container NAME MySQL container name (default: ac-mysql) + -h, --help Show this help + +Examples: + ./verify-sql-updates.sh --all + ./verify-sql-updates.sh --module mod-aoe-loot + ./verify-sql-updates.sh --database acore_world --all + +EOF +} + +# Parse arguments +while [[ $# -gt 0 ]]; do + case "$1" in + --module) MODULE_NAME="$2"; shift 2;; + --database) DATABASE_NAME="$2"; shift 2;; + --all) SHOW_ALL=1; shift;; + --check-hash) CHECK_HASH=1; shift;; + --container) CONTAINER_NAME="$2"; shift 2;; + -h|--help) usage; exit 0;; + *) echo "Unknown option: $1"; usage; exit 1;; + esac +done + +# Load environment +if [ -f "$PROJECT_ROOT/.env" ]; then + set -a + # shellcheck disable=SC1091 + source "$PROJECT_ROOT/.env" + set +a +fi + +MYSQL_HOST="${MYSQL_HOST:-ac-mysql}" +MYSQL_PORT="${MYSQL_PORT:-3306}" +MYSQL_USER="${MYSQL_USER:-root}" +MYSQL_ROOT_PASSWORD="${MYSQL_ROOT_PASSWORD:-}" +DB_AUTH_NAME="${DB_AUTH_NAME:-acore_auth}" +DB_WORLD_NAME="${DB_WORLD_NAME:-acore_world}" +DB_CHARACTERS_NAME="${DB_CHARACTERS_NAME:-acore_characters}" +DB_PLAYERBOTS_NAME="${DB_PLAYERBOTS_NAME:-acore_playerbots}" + +# Logging functions +info() { + echo -e "${BLUE}${ICON_INFO}${NC} $*" +} + +ok() { + echo -e "${GREEN}${ICON_SUCCESS}${NC} $*" +} + +warn() { + echo -e "${YELLOW}${ICON_WARNING}${NC} $*" +} + 
+err() { + echo -e "${RED}${ICON_ERROR}${NC} $*" +} + +# MySQL query helper +mysql_query() { + local database="${1:-}" + local query="$2" + + if [ -z "$MYSQL_ROOT_PASSWORD" ]; then + err "MYSQL_ROOT_PASSWORD not set" + return 1 + fi + + if command -v docker >/dev/null 2>&1; then + if [ -n "$database" ]; then + docker exec "$CONTAINER_NAME" mysql -h"$MYSQL_HOST" -P"$MYSQL_PORT" -u"$MYSQL_USER" -p"$MYSQL_ROOT_PASSWORD" "$database" -N -B -e "$query" 2>/dev/null + else + docker exec "$CONTAINER_NAME" mysql -h"$MYSQL_HOST" -P"$MYSQL_PORT" -u"$MYSQL_USER" -p"$MYSQL_ROOT_PASSWORD" -N -B -e "$query" 2>/dev/null + fi + else + if [ -n "$database" ]; then + mysql -h"$MYSQL_HOST" -P"$MYSQL_PORT" -u"$MYSQL_USER" -p"$MYSQL_ROOT_PASSWORD" "$database" -N -B -e "$query" 2>/dev/null + else + mysql -h"$MYSQL_HOST" -P"$MYSQL_PORT" -u"$MYSQL_USER" -p"$MYSQL_ROOT_PASSWORD" -N -B -e "$query" 2>/dev/null + fi + fi +} + +# Check if database exists +db_exists() { + local db_name="$1" + local count + count=$(mysql_query "" "SELECT COUNT(*) FROM information_schema.SCHEMATA WHERE SCHEMA_NAME='$db_name'" 2>/dev/null || echo "0") + [ "$count" = "1" ] +} + +# Verify module SQL in database +verify_module_sql() { + local module_name="$1" + local database_name="$2" + + if ! 
db_exists "$database_name"; then + err "Database does not exist: $database_name" + return 1 + fi + + info "Checking module updates in $database_name" + + # Query updates table for module + local query="SELECT name, hash, state, timestamp, speed FROM updates WHERE name LIKE '%${module_name}%' AND state='MODULE' ORDER BY timestamp DESC" + local results + results=$(mysql_query "$database_name" "$query" 2>/dev/null || echo "") + + if [ -z "$results" ]; then + warn "No updates found for module: $module_name in $database_name" + return 0 + fi + + # Display results + echo + printf "${BOLD}${CYAN}Module Updates for %s in %s:${NC}\n" "$module_name" "$database_name" + echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" + + while IFS=$'\t' read -r name hash state timestamp speed; do + printf "${GREEN}${ICON_SUCCESS}${NC} %s\n" "$name" + printf " Hash: %s\n" "${hash:0:12}..." + printf " Applied: %s\n" "$timestamp" + printf " Speed: %sms\n" "$speed" + echo + done <<< "$results" + + return 0 +} + +# List all module updates +list_module_updates() { + local database_name="$1" + + if ! 
db_exists "$database_name"; then + err "Database does not exist: $database_name" + return 1 + fi + + info "Listing all module updates in $database_name" + + # Query all module updates + local query="SELECT name, state, timestamp FROM updates WHERE state='MODULE' ORDER BY timestamp DESC" + local results + results=$(mysql_query "$database_name" "$query" 2>/dev/null || echo "") + + if [ -z "$results" ]; then + warn "No module updates found in $database_name" + return 0 + fi + + # Display results + echo + printf "${BOLD}${CYAN}All Module Updates in %s:${NC}\n" "$database_name" + echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" + + local count=0 + while IFS=$'\t' read -r name state timestamp; do + printf "${GREEN}${ICON_SUCCESS}${NC} %s\n" "$name" + printf " Applied: %s\n" "$timestamp" + ((count++)) + done <<< "$results" + + echo + ok "Total module updates: $count" + echo + + return 0 +} + +# Check update applied +check_update_applied() { + local filename="$1" + local database_name="$2" + local expected_hash="${3:-}" + + if ! db_exists "$database_name"; then + err "Database does not exist: $database_name" + return 2 + fi + + # Query for specific file + local query="SELECT hash, state, timestamp FROM updates WHERE name='$filename' LIMIT 1" + local result + result=$(mysql_query "$database_name" "$query" 2>/dev/null || echo "") + + if [ -z "$result" ]; then + warn "Update not found: $filename" + return 1 + fi + + # Parse result + IFS=$'\t' read -r hash state timestamp <<< "$result" + + ok "Update applied: $filename" + printf " Hash: %s\n" "$hash" + printf " State: %s\n" "$state" + printf " Applied: %s\n" "$timestamp" + + # Check hash if provided + if [ -n "$expected_hash" ] && [ "$expected_hash" != "$hash" ]; then + err "Hash mismatch!" 
+ printf " Expected: %s\n" "$expected_hash" + printf " Actual: %s\n" "$hash" + return 2 + fi + + return 0 +} + +# Generate verification report +generate_verification_report() { + echo + printf "${BOLD}${BLUE}๐Ÿ” Module SQL Verification Report${NC}\n" + echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" + echo + + local total_updates=0 + local databases=("$DB_AUTH_NAME" "$DB_WORLD_NAME" "$DB_CHARACTERS_NAME") + + # Add playerbots if it exists + if db_exists "$DB_PLAYERBOTS_NAME"; then + databases+=("$DB_PLAYERBOTS_NAME") + fi + + for db in "${databases[@]}"; do + if ! db_exists "$db"; then + continue + fi + + # Get count of module updates + local count + count=$(mysql_query "$db" "SELECT COUNT(*) FROM updates WHERE state='MODULE'" 2>/dev/null || echo "0") + + if [ "$count" != "0" ]; then + printf "${GREEN}${ICON_SUCCESS}${NC} ${BOLD}%s:${NC} %s module update(s)\n" "$db" "$count" + total_updates=$((total_updates + count)) + + if [ "$SHOW_ALL" = "1" ]; then + # Show recent updates + local query="SELECT name, timestamp FROM updates WHERE state='MODULE' ORDER BY timestamp DESC LIMIT 5" + local results + results=$(mysql_query "$db" "$query" 2>/dev/null || echo "") + + if [ -n "$results" ]; then + while IFS=$'\t' read -r name timestamp; do + printf " - %s (%s)\n" "$name" "$timestamp" + done <<< "$results" + echo + fi + fi + else + printf "${YELLOW}${ICON_WARNING}${NC} ${BOLD}%s:${NC} No module updates\n" "$db" + fi + done + + echo + echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" + printf "${BOLD}Total: %s module update(s) applied${NC}\n" "$total_updates" + echo +} + +# Main execution +main() { + echo + info "SQL Update Verification" + echo 
"โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" + echo + + # Test MySQL connection + if ! mysql_query "" "SELECT 1" >/dev/null 2>&1; then + err "Cannot connect to MySQL server" + printf " Host: %s:%s\n" "$MYSQL_HOST" "$MYSQL_PORT" + printf " User: %s\n" "$MYSQL_USER" + printf " Container: %s\n\n" "$CONTAINER_NAME" + exit 1 + fi + + # Execute based on options + if [ -n "$MODULE_NAME" ]; then + # Check specific module + if [ -n "$DATABASE_NAME" ]; then + verify_module_sql "$MODULE_NAME" "$DATABASE_NAME" + else + # Check all databases for this module + for db in "$DB_AUTH_NAME" "$DB_WORLD_NAME" "$DB_CHARACTERS_NAME"; do + if db_exists "$db"; then + verify_module_sql "$MODULE_NAME" "$db" + fi + done + if db_exists "$DB_PLAYERBOTS_NAME"; then + verify_module_sql "$MODULE_NAME" "$DB_PLAYERBOTS_NAME" + fi + fi + elif [ -n "$DATABASE_NAME" ]; then + # List all updates in specific database + list_module_updates "$DATABASE_NAME" + else + # Generate full report + generate_verification_report + fi + + echo + ok "Verification complete" + echo +} + +main "$@" diff --git a/scripts/python/__pycache__/modules.cpython-312.pyc b/scripts/python/__pycache__/modules.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4c1d63f096740660755a605e89bec8a7acd88a10 GIT binary patch literal 31659 zcmeHw3ve9AdFIUS>|)>V1@?i*;*G_d1fSqbBpxJ15FiOs4^R{e#4JfbcxiS)3af>T z$Vv>_ObA++19+B?%d--J5w8TLoDyA^#ByvWuH=%;azX8C&6JtgcNeGb5{QtKkX85i z|DK)M#d1J0c2aezyD6Zjryqa+-P7~;-(UaT`zJ=Dj)Ln;yIn(n^8!Wv7$3w-C3xu7 z*Jz5Gr&y|wVrf<}LXRr?6g2vkBg#G{xiftXxvTnAg;o-%gpb~ zKS!~eZ_s@Ote4fk#q_z@LRJTNH(SK&;qGCJSp(d?0j+}CPqD_gDAx48La5LC>H75* zren04PvUs;o$g~Z(xk|H3r7c z`$FTs2@X;W27H49W8-5(g99TzA&V~<8VCh^p>qQv9XozuEEwVf1Eanm6f@!r4st^i zAt+>E3=0ZQjs|>Z#yQM$c4+Kuy^jl=o*Wv1RtkgygQ21Ev3e|2#|9<>V{Bk-aLRXn zU}T6Lz^F#uPqAu#I;9^N8ykmWAu!nOSEjU(U|?`$AQ()k2ZHR-U?`gdhJx_iGl8@RMpCNdlM^F> 
z-3*kQqBo=z<6|l1R3MmQLfm9vFx}Y56{QsZ>DANVnWrMu3_V4Mq!bZKd^N+EGUpOs z-*Eb~$&MD|LTWMeJk?A272TbFCZ&YtPN~2dnn_|gRYZB7hB97+GK?aH%$-%SYEkgK1T8qtOsgWyV`(xG)nnsT5HDBbJgGJX zkiwkdLG-bi6zT}@#cDj%v}&w+9eP@cUj91tGAV#3kzPaD zsG1C&Y1npyPH8}1jI$tC9@6Cx^8&9;jB_F1LweUwv0FW)%YOR5;d)3{`!y+b;5GQp zgDEY@Dz-lqcrBFD1t&&^LL)##jqsQuSWm* z{s|BzDJA$)8gAf1Kaxl>#8ti$nBrVOEQsH(k6fG^i5o}8puT0tt-cc?b{F;e()$n$I z!rm0KH^uE6X4{i`(;Rzo52kmz=liboMU~N`acA{K-K=u^CXC_~bR4PwG1rV%Lw^&zVm1|K{2W4PX=&%(zZvI|cXB?&DM7$Dw$45C1t zMOPhiNGYMs@LEGx`gEa1)Uy0)#FD4;)=43KuG5-zQs^Wa+DIN}$>J|BlrL}vHId^_ zFXjaP@*=A96lcn66+$vgl~Wt2AbkOJ<`+S$qp3OiQvGYn7pV)h-_jj+ z9~RX*lp{XSia@`b9AjI3DVpPofIe3YPAX4O)`H7UfTG24fr;^O zp;S^j%TO=~!r501La{nr+75~psF9#LiaMtcBGQUr04ng%oI@aN_Kk`(u$=1b04%cF zZ{n(v4QnSfM)!EA6Pu!gWg)y2wSu28h9bm{b6d%^tnE87>2y}fHz^AAc0 zyJyP3JaTblVSC(EIkW$+-Z^*nozSwr;;zZXyG45bx|xG_Jw;!uoH>}Zx^UR>&8qI2 z-3yiPI+o2#RGvSsl3gcbI7gG&^Z1Bc&2Y6#eZW7EXPeqKF=-npQ{DFc1tw z$$1eaYd{_%d4v;9MDtiq=u3r=LT*?zieeZj?*PhLc?~+EIxC5BIbAZN3rDnaT5V>Y zB$g-MK_u>Cs7oUt7PD(-%Br0%qI+WPbpNVqXU?jfKB9kO?ezaDYIj!Xndv@hh!~zw zFE^z4zqwBiF*q#$0rX7xj{TN_N)XRFl;XVze57#_#65bCcKL9i4g&mw@y7z8Np39I zN**u(;RqNICnj*DL{#JSKoG!{sR?k#eIqbw@r|F6$oO#T5tK4_yf!cjXho~+Q1US+ls7K?Y{yagW zPz{9wqX7Gy9T`6jGs8d#6sLj7kr3B`bck}G=K>=DNu3Y$hsOQJlwNr4f@&Z>5cNn4 z1(9iAFr`5+mQU)6uQ`}9NeEj%Nv=6Rzzq$I!E7}z?MrDeOFuir2?X$Uru1pfNw7}f zUhqRYP|(mH#MuUR3F#8l3>!NX92`F%;QA%jxO0&71pEj0!dN#$t+=RaKaBfk&%$#t zb9ponGuO==xR*6ib@3Y4iiUC)FHFI#GU+UddU!{D(&Jy;!@GBg_s_H{i+bk3iixUh zc(40qww?FX8~ftO+jUgA%k ziys(@w-4WOyz-e6^Q`1yF~Eq+Cza?ai@q51G%gjyJlkduL1h!>(wMpQj=4PPt4sKr zW4`94g1B${KX0k-(&&MhyMFOP%)Mpy*`(Q>F#BR=-yL&V(pUSQ`MP;2FYep+&tB&K zn7i&@qPY7)!#3etU1=3`DPbZH-ATIA+6=w=DeCRa=5(Bv#93si=m-_7 zY@D9N+0s-DtQ|^lq@Rq0rjtD7_n8RC1?eXR=@$cxB^kB(6|4 z&Q9Wrq&P=byExfm;Ijnd0{O5c;d_e_HORh#DZRfer9Vv80eTVBCt@vUmIBfeK;V>q zED&IW{RF$0Bpr$`0G!Qf(b#_mMuTx~>Y)}f^^<^6p8|j^@aiOpqhNQ~C}QkFjFJnS zfvFX&0h|w{)G#9pO$Jkj(<9@9uk-^*htoD2pwyv$GF=-P=|49<{t609vV74$I1(5b zo16$+B&x!+^c2?vl})K&VjKu|ru2a^g!$Qi0P0d!@gZ>L3@$!E!E`f$>r&Rt0z^Md 
zIRQUUX)X+KV=yYi&=9~GW$>W`2W#ZM-Y3=Y6n6^BTZ35PRDnznu-OUB4fovZamqbQ z()5$@sJ}m@>+eqwZv81kfB&nK z10y(6L5#V-KWk~Gzn??6j(Y~333OgXhcF&O-y}Nc(ZRRGT|nnGbf(Zj;FNnEod`P9 z=(M4OO`Ouef)uP{g{DYdZ%~A5q4F^Cp*VCd!+-F9fb+}LJ+0x32a-DD7Y_=K)fs1; zNvk8yI6n21EHTTT%`*pPhvLlUq`PRQZMG=R6eVq*naVl zDwZqu@J#-KHqPw%)K|I0F8g-Obk7}(Gdq%nzL_>&>y9(Nq_SbllUEyr^CZj70VGrM#6`P5ak$S%8DfE6gA<UEbrlPIMZ< zd1xgcQC62dMKBZ&(btsWY+wwQ=tA)64urUI%p`S+HD4P{!G*7_4?OFAZSB-$dQfqe z8dRJDd=-|I=;7=MJ>15wT4$8oR<$y~6doNvo=@WDJ!j`4y5}Oal zHy{40`nI{7*K|LCTM%LC%hnp^I#a{Qw+4QFPth8M>$e7Hf>y%T&AB)QX`|vt+lZTk zq<&?lmADZI;I4qPwt;W~5>(iQ0p(Opc}uR@o)r7dgQ3@4gIUN3^@@ohGQ zJ;=O%n?>li6_>0G07B5m6tYv;%<6$Mv(6AvzD9EfK&q8;D-Q3c*8#HoBP<7}tcQEr zPjq$kAL-irV#l$4J;!_fDxvWao8@++gR-3TSeOiv9!rm=bV85ohiNB_vC`}bR;NgZ zB&~!K24Sw?7KiYDDQSOje3Basgq!3$_PQBrv3xN4Z|nS&Cv~W1M`6^ zfrX=2&hgI8vpR5+Mb+0{zWOp>zb9Vw46pIrHQQkg?6#wauiKTV>xtF%#LY)|&5;N2 zn)9~=@eN*f>@}}_ulTabdu;{JJe66RXX&?a@fp9_oU2`0TgkLJwDg)!48>{-u3{en z&mjQlvCEtS(4B~t9D?|mezme9=ik!vG~ir5hMJ=Oqw)eRd_oH0!@)rj3>p-T&D>YP zoBa(v)b2z9`Pu}xJG}b|Zy`N3B)VJUPEfE0MuNK=rTpJR(iPzRHT91()NR|#AD^Gw zlCW3C?3K6e`xdJbwfkbV`{MR}G26=zNZ)v~qKXE-lmlZ^LU;QmB!QhY-DR9o56CV% zT=!34^t(t>P+RVrZBR6S^hDz5so2p|%eI%}=9hWR%VgL?sr(kC<$e{NFM{LOaKDWH zUqR zbOh87OeENVO2sNet9s~FwMwO1H7Qj`>Cbhvs-G4JBe^x;F^oS#uN;HnFuPY~fY510 zW)(AQwNZ(yfbb81E3EfnpC^~7&?EE}bOK9SCjgXW+LF~K z7O~Et3z$Kgo2M=7@TFt*Vu{n%bE!DTNzGYMFi=(Pq}shx5h$mAT93VI5+B5bx8TNS^EpwX}v#Qy~;WPWvK0&KU8@)|kFj9x0Zs zp_Cugh0RdEV&LZaP?K1Gq*$cSx?gFBaZJH_pzXZVrEKBw1~FCUo=HDlCf_pfzKbHI z;1);9;LWP0%OM>qmN}O!x8))?!0m4dZ{yU_bj!-n;JjcKwmF9|Jj$1tp$+lAM{_~T zYz3d>+PGNzN5|MhqC zCC!B_7vKvgvvh~#%a?qGl23bq@>h00f^H&)nNki;Ob9ABPMbM=p<$Qr+&~cQYsMPL zB3!ko(pC#_7KaHXOeJ1kl;4UG+sOtJyU z*+_B<)W#tIO`FozN4Go0?efx8^6G5Fo$w7H0ZA)$QY>@;e$-kE#k|8 z2|Cz)gSje!p$I&R)lE6jtQ4sORt`A?vw!Xo7LwC(u+O9{Sy+DmnUR6BnXaEw0uG*o z*QOXm-bwFG<-z(htP_mobW<>^1v^+`cP_xW*loD0SXf@#f-99};t%Hgfr);E`~mr& z7@6e2?p}jm8H90=BM=M@oDBe6|5}I}AU3lCU=f5h&#_ma#RRh|)I<=bc=9Cx8G14U 
z7Ru#*4>JD|{0HA<5RSNO@-7?@OrZ)YqNm~oEnp9nEG?hepL78xj0U*}K37v!{%@S% z8P`2)=_<8DRRYGaCi`5+#SvohH22)aX=3aoc<$zdozyk|Re!W|u_Nx;7|(B+)hE5a zYum4GhZ4LkuzZ@-gRy*h(o>o6)Wtk?aZkgNGv?WH@z75^#e7Lq+|vw;l6Dud-Rg_m zYi`>c7x&F-&;+aI-S%a(U$TJWTMqF>U2)rC-f%cs)BK5jL)89n+7^_*et-u*u~9z z>Tj~k=9a92PVhy~#cj{?hUb%;IzK7ezu5lHV6@`vLwwPG$OCTDU3|@b)y~-1<)W!r8Ca^X`sW6F50WWou2$ zz#>_DMY~WPx0KDQ?m4{khprroD&IZ!Uf=aT!6x~ZJzlvh?$`|m)tbVD#s@3TQTwu{ z@@~E_>W=5v&FX(*ElgVMNh>T2IFqh&{Bsszh~1rZS0+m~B)xuov=u>ABdGFqFj%WsqmS^-Akp{lX}Mkb7eFdv4diz_-1?*S*MF`aV}G zjHVSGRnc_2ykqJ7M{j)i2H(;#t6tXllEoE?;w`s}w_FL{YMWJGGThUcFPko!=FWej zDPFliLr+@?(^MDTOMl|;{WyQI>Vw)FwM%DWwL5u#@65B{03d4j+@}nxT6nP{7K&!w zMJ2O&NssSZ)77T?lv0c9zpk=trmLoCIPTgwd+<|xVf0|4Vo$tcPtsB_Z@OYy2;Z^P zuH-@5`x?q%zgt!rJ^PLNCF||7jaOd0rJ3za7I;a=DTo*NVU^xpa?NwqvtWomyLf)_ z+|ub=u3I`jzYUi2p>+;lJe)B2Vg_Hd;Eut+;syp+iV3TXssUKlTQBeW@~(8sg{o@} zR~wcMH5k0-D`=LE&AZTft>o`m%BbBVmPQg@QXYkIN71>8g`7e1jUxW%EVzLc^jyI?Zo-fp@)l zN7J_gR*@%ZuxG$_WK<@Bxz>xV3(w!yS1jg#;JM-X-HqRF{Z{L3e@B|Lq^%Pr|I(G0-mQ7B{(61f*@U5H=jX;egL;L^ zQ>h=^A5~C}jSuddsNw_k188_pC)}Q;soLXk-w0I*;>ht`+IFm1{k>+}v2ykIciN8C zsQ-9_<5&ap=Y{#lHZp(S!l1hi-8GmK;2Q$w^oIm<0$d-#x||Dk-r<&-H0lGp=fFHs z9wn8-wMG3HZL?t`3pJe9)3OTzZC3qtSX+Wv9P)B5A@(dY4eK=1Yp^azugggckDxt+ z0VJSa3OTbvz>~?a?&c7Wu6?kXo9{W^##| zD^OF(@hHz>RxVi{U>-3IIb{$Di1{+ikMV8iTuN>={sb$BvT%2=+nz$ z9uhtvylpv~c(EsC%30?P8`dd9dW-)IWf)nLY(1nM9YPsTX$ zXg-VR_i%NlY|+OS$i7RdrPuHv}0$cgiy#{8tq}SjUnVgHO zknc?pDl?`*s0K!HYzD+{!8=_%#ie{@x?adLhTAB7m#o_%oBsZ!w`cV#*|V-*BbjKFpBoVLC2t z+}j%6@?Pup)@c3Wz>@kS!-s~&H+c8%Wz8PI4d57x+kmJU+-cY$|2^+@@3NsGX>cVB zB{4%uRQ;arx^CG}CnS#R%NNy&n$~zt>k~8GG1RS?ftM8B>sCX2 z!vVhO*&meh&aQ;BH|Fe(J5MY#&m|e7&~E>vq??v{KYHQA7jBKjHy`3Rb^Ty3@9bV? 
zda{T$r^~I6Z`jW_ANT>yJD*)<4oNAJx)y+~61EL7+lFOZ%f*&kc3!s&P!8>$&y`A* z8judL5=mVXN!k#zH7whjF19T7^1AJqbVqJdzF^Zgdv7_vUHq-$_~rxrrh_@9@&y~e z*|IeGQRKr&yrrGr*ugtHmzn+d3QD7gV+9)!RAn3qrXd@B?-S&1Tx9nIi zrm{9s*&eHGk5_gkOREy4J7c9g;m}VdBwp~$Csk+Qq= z7s+p;w6+xkw0XkQ6!QRHz2WbT*nS3RcmhL~+%}i2=qbPm^;h-Df};8Gm2k4CJWKy8su9bYsUCvi-kGZyI*GFukos0Ccrgjmku|cfH&#-|W+#jT&)Cc!% zRACSOAm#0TaDN*F1aTDklbzdo&89!q*n1t8KW(!2x^1!Yt-ZzSH2@h%H~}D=0D%Yg zd;o=^nG#n6swjlyURhHPMS5a|6Bs;WMrN`oOkk#!(~7ZkKv^MCmQ1)Z^nY&3JAktC z=cKI8lrQ?Scs7M3;}`%0>FfzS9E^YmPyhe~$5ZIVyb%e2fyI9?pOTbXg{0K3@%dHV zsl4>yhO1u02$lOSAQvt;A*}ZZ8zEuO1DKPKbFDsD{3B`*Q4Yw64pV{j8lGx`8yX9N zCG4L;5bT#rXCfw)-18X5kiC7_`eYy!Mn4_N1|vP_7e9fh-@u{o1MuLHN9MWhxAO{< zb@Vh;_M+75Z=BWY@%-QWD5?|(Et^oSAU=(;)oiCfZXeGuL{H#Hb~9| zp)#L3ghDB~7hp4@Xgik{Z8FFb2s}e>PO#ch$j-49a;Q6`l|z>la(FtVl|j8B6ywf{$YC!g7c$4py5{G%~=92juq+A-H+MKDs=u6vQngNki$XVz~@j8g# zB<9N8rFi|iT*-V(5?6+-_OwLQ(Aw4z4$S0|!_|NbnJ-DCw{{&ai?;14X}5}HXYNm= zZGS4-(wSE4^5}RfI*MV$9&)ZH^5_(3L+imelY0T4nh%IA+TdoXUsZ5DeJzh*OFe@Md}y!O-@qz0X9|$EzFecy%Z2$%yZzcMgS2}9vnD}9BUsuNYeK zW&I9Jt>7>OS5Xegm;)txL@3d11Vqb)P@t!e10?{LJq(t7rDF)KeP$?3p*G5iUdc zulk*qYpqvX7YA-_x}$mKu11eIWBTkn#~n>^QfrvY`$}=r-}ph%jiOJ?TNlpHspoZX z>v;3lSq7YBzI*=Mm2)6)Zf#mFekPv(%&Z=ehdJZID+{Lw57S^PXmiciUa6gJOL~j1ZM(WH+L@@>6|2}4_wJtUlEU8Wx!x1^ zZo&{~(Z-m!@wRuBs*q6(nl@XU8z>ALNxitdB+V~c#(Uby-~^h}~^N306X zR%g5K8mx0&QF~MsJ$6&|f&PYmsq>>lA0CR=Ji}M-#r}h#H+XRRoi&m_GLq>428s|&e*2Tc+Gyk`T%b`xNLY96ZJ&fqMH{LOX@`Pf!oao zZfy9$0AG8Aw;f$J9D{h?QN5^)PA=|S>V*8aH+9~46>$E>Zr;|jY&f!pz(=OvHr?3p z@m{{Rleg_(HXINg9RDGoXP=Bi{-h2=iAb8uMWkVNJ3csc zm3}x{9(6`f-*kT9z2RLt5%=%l zYj*OsUCV~u$Xs`{C0ZHfZdQI!ccX6U%txaij>c=-d4C6Q>s&VMe=LWe7`i_5V-NYf zo-(_6Z_|=;aguj$j+?gdnk^6Rze2;8@!)=82G2E7mTd&j{r$ZSM=iQP*keEHG<>(p ze$-?7euMpJ$=a>odGM_h0A&`Wdk(^t4YWWqyhJ7Hab`W)3Ia?OWq_2YqN2#C8S)d( zr=pT|!Z>WOk&HG0g#|2DxFSqMDViF<?jPYq=bcZjgt`wq48~ za*94VpoUeFsW_9FiiafNMh>jGDQ!U}{1=L}Gk5O9I{d2%|1vpb_*XwQ|8N*GOm2TJ z{--okS~$C$D5q-_Qs||^X#$A3ND3(+?$dBOmY`~<6;3@UoHU*ha6=JRH2Av7?)9{u 
znaUFnE5hELKDj~AHOtPy?<>Gr8=^gk%n1N+78MoDhm3;0S=zJod~{#*LUice##m|3 zgH;5E-ZWe3$wP$r+STK~`ghU4yLH*;JDl^Y6Cy*I1AcXDGx~gC>mxyMoD_qn+x?<# z#QjfTbxjMYghQNup7znNZJ(0;OU`In^h9+3yM|cF{s-LuhO#C*k#1A*LXyJUB#ad<2f{JAgrdj?V9)gUj@S2Dpg6zXS(<_yV@hT;Sk%4>cP= z6A=1)PwD9~Z;US2kHT@!mg%fj5 z2jg_0Wc?#5T6y3Ah7U*6x6dEGau`mWN||hIEaD~{O)*DP z+_7Qi@Lj8Oj-4OBG9I^9C9I7xYh&EnJOjgN!Gb2*5H~c>blhdEbCnC8Wv1d@p5aSx zy!l4bRd`K%RSSLfW7Qv-zGGT0?TNd3W{sel+!eMEtSA&t>pg4noEZ*8_E}d{N~>cP z7lXk}-{ze6T=D$OtO51KMrn*+io6*~=6mLct_;E3y;b?``ft@Qm+X({?yIEof)Qg0A2erP)B!U2Q~QGW=%D|?8Nn|%_?d}vL)rV*)>Svo{}eS+hnNP&wW znsVvjs9{zMcyD$(e5*MZ+Hl`Ah(icD+(cRk%p_&OLi(mzN`*_Ha%EcJd??$RGG$N7 z4@tP-O=*8V)Pt7WQO!D7XI9C0qAECex{|HVX`>n_UkAD5%Ga$aAJ40+?S`pu&Nv|a`Vh!ZtWyHKyeI^_I$RIFh80*rgwqDo<#UjBcvjlI)%ND`CX&% zZjrcPf+cNt%_`zC+5KdB8cJf(k8Y7cbeAMjTngUeS_LJOP*hJi__EU)XSXb=pn8o(Yd>?Ei0fa%^i2Rk0cU*HKJY$qqU!dZ&s$CJbYSdlLLOvw-)wiwLf zUzU=ejHi=hgWNOFZNj!AxHK+O!KLnCSl!S-&T&+7KY}FOJ#>Bx9efOvgARS-8hTc7 z>GgU){1{W-;JNXkL717tVGr*fcfKea(QWKZa>avn;L%roF z5QRb5;(ez*>Ugi{dQrTvDN(pJR=71@xczrGea!rk@jJ%&j_$;c=VCjai|=?LZg~+7 z`PbOyHZN4*_SI!g1tKF|U+tQGmDf~92f!i2S=c5n;5SGogM}M~F1EopbjBxz`h@es za8J1S+4uq@aL#qJd{0>hp8xgq6tRzyJ|WX*V0k3nC%AlgwSa1`+ZX;?>;_x`q|646 zQ||PO727;xG!brjVsCgfQycaT)aQSmzTt~Hmo>!nPNr8#hkrE)#t;o;+jY1lt78oK zgeb9JkQD#FcRZW+2A$_<8$i(wl~TFH>t z*o{W`69a<*a{gaRiAH~UDUBGPGVSHg!p~2Rg^rLBGG&0@^67_I5|%RIQ*iRMFnk76 zI(!x$QU*Bgzdx;M2FY~RuZM#w1Twm;OlDCt6Lad*hWI!nb1o3!DIGTvOviE_Az<*d z7*L1-dTc#0qy)u2D%)9Di7?H!BT0!H*ft+&G{)yW76VI8k<(Ml&a`+N~Kcc zxr#N&vqeZ1t>c-(Wv20-)hnz}R=r9oZ0eb3S2gJ@K-e2&ynHE`U3-_A z9a(uwlE!^tXN`H3B#$mP&n}W@_Zp_3BTPTPhG|>UTCh;STS`eo!$}2C*01Nwc-a_9i;6P|2`iZiPpH?J&Mt!G+|ap^q0{7*rPMH=gkzdG9mb<9<4`H)aAktSDHHd% z=oFyiK?m$HDB+l~Epz&lok${LDhpc5~|KccqXp|<^yGTc{FX8(+SRjs5quNoY*d9~3)J6Bt`(#{oF*HP&ILE)mCS152+ zmGHPi9=D_;`e}^mr==+HZVAz}dUXtb78`EeN*(Q3_0aULRUJ)lU)8kITUWz$F>PGk vM|aV*?emuvO4X8Nw|2?IyTeTF-IBZAik;cL_0`tZ35yI$FL#rxOvSGgA=mJtz$&8z}0Z@IwnmHA8Rwnx?2( zil_Q0p5}F7I-={-(U{kV^?iDhGJOn5**=z}hCTyHjeSOva(x`6OxP4L_nApsHf)Jl 
z`>YXLpDkkVvqv0#j)=3*N$L#YqDXOHam3Z5@XrGO ztZx|lN_iV^=N-J0FXD^eVEf8=S6?|_$Gcx;`YHsojyg^8o>wWpg9d>4oG+MO?)+^yZAkP4WzXuYV$vS zFYkr&-J45I6yMD6gz`GRh2I5fz0kndzSKxjV!e(Mj6>(O<K7!r zWk`w+d;7&`#Ctj zSn$61Vn78ABB(md7hi-4m^asiM*}au=<^1Kd2d7z2Qs7LLsBq$O%S2m5ivR>1ZBw^ z8kVDE$ii4il0(A-W)-V9J9)3SQ${dhSPn&p1L5$v_a#9Xk-U)gj!J^q%nSXYVL;j+ z62d$|83@W=iQqM7Rwo!3_Ffgd;^?qf3iJzrBsAEXuEzqtUSR(2-vR8@IVYdO57< zp;Xahj=?5AgJ_4Ti%yIj7!_e;gGgZsPQ|3II&7!H;c{FJ;Eb>iix@0YY@@?LIP(K= z9Ag6SV^w?vRWaf#MSS|;$Oz1d95~R6Ur!O>xMEE zhCpQ^nbE#()7lN4pyTvVhQ4aGnDRPa&oj5#cK|yEn%V}=z#EYs@}}G7ci3-$1uA*l zrqQ=;G1_HL#RH=NT6(+aoy@qmZBfj-c=v73H;%Og;7b1ma6q^D@-1){ZA`f|o~3ga zDPINN1q&}^PC|4zW^T^zS*-GFC|&Hp`G7nK^e0OLyk{?Vcfs`r3e*UfJun)Uk9zC0 ztWWa!bc!)kp)i9&cqC?QZYD%hF$6|NgkfGWs5CS7Ts{f)Hp2aH*wf&Z1h{o@8u<*_ z^I`WA5RDdZo7y1|fhf>tQOH1eL;YU3_ejaEg?JzcJ_h;<;=LBDU`^nZ!|fG!Kvo^h zOTuuhHPbm#WH|~Y+4&IkyH9uaoasI3@993*-i4$|D*$-4(C4F7lonjfvF4Eg5Z6yo zQiyeYD=-#{j7Geq%-cA2VE=v~WD+cvM5fa($iNIn1W}zqw(BiRsfSxW4p2A=wXfpl zW>M$|RD;bLl~&9W@Daf;i=#s9)CINHI~<4z`|`GYA0o=J9-!Ew5a7pQ%>)U^Ig((F z%=>hrH`}49xtSNon}Jxz9_t{ZMot3kVVCF1cr7VX|Sjm z{eC_g^!pVna0WkskxE4mNEI__f`t+sKL*lkih-1g5)vD)_hF-Y_@#X#XVtcQ*|z&HOIK`1COguMd6g+$W=hkXJMC;v z+n!oCQf%306vG+a&%#9m-M#X#q#GVTRTwNaSB(UQQ zXMxM9myoMcAW8<6st;IPK85of`geg0|XR>=Mlc?4KHM7fpx_xKjhR9*TGkoQI3O5 z(**|_I62TK#hAG}AfGUj5bhUGKqK)9h$QU5>-Cb!;%bG4L$co=^W=}BMJpDt1xgH) zRN7OKU~ZVxrPT@M$5yfe!F=v)0w^-r=b?0#`g$H7&v9;DTsM^AxPT!WK)cc9@z}yJ zdfA{g$b+SZvYc0|g~``!(+|V5@q#U0r{2sPfhm7RM90hF##~&d$s&(Y6Z*J*2nDMx zLDQ3lQ2g2wc!Sn{f{{_2B4G&6-J>BhqMFe<0M|;fwT=QU2bq!VDDS1y$x(%*bvAfmZ?#GWT9=zu<2@-QLwhE}vo? 
z6~j@GX%q&zsbb)Pw(+1^;7}Dd91ZZ2i04W%o$2X0-%gYY>xEHJpCW?##gGODXbw$$ zM)666)&p8a4gB3b#LPaZDfm;nB3!G2J!QHTUGg zz#{)wL%$kYd_KANGibr#V*ln%q(?>2c(nW(!@K6>(m{{4d{$iIJrJ!LTdMyH4T zZww?hx!U&Ve^b?JZmZY-Z9NMgn`k(0DItqL+oIa!kwECK5doeZ$c|@% z11zXbZV6&_j&$(`*`-y-xxBF$<1~zgBO@}sY1C>!EnRG zY=4oGQU4@is7%Y{Qr?p5?Zz({(gMiIC&)%)rTZjec-fb4aC!3O6ERmk$kl$7&rg7G zUQaTyvU({h$|qFWkgo?ZMG#M*5UsEwS%^q6j)=gZn#8QCVS-514Kc34+lVNDiugX^ zq2mE{d>=jOvnfSaM?+!WugW)Q8X8wvF)%zJC>$yi7rMK9MBF`veR(tr)``oQ*AECX z;q73AK*1R_e;_jkVGN8fAUq)fQCKMq))$|N5DgKRHXd7QTRN0% zy|k{Ui!RgaddlVmnb=)6d*bE^ATO@EL|YoZ&fGjR_w4-Dl-rkRPdi-G@gK(5oSr%E zPh9i8e_SzdU3hw_V##{v%)Op_9mxw9lO;VX&fc`uMTppAE7qNBj^dfeeiWNKb>C5w zc6!!<_LL^Dt-2pZi6ue?K&2n@Z#Mdk8UNKj} zuA%@nmq$fl3SH4*!N;g;{s^=YU&iPFM1TbMOC+T)Va|sU*2m0fn7WMO@f84j49gE; zgw#uX3L`urVicnz80F;LWB7>?oV}1(#_2!TU#ES#m}=}e+C{bsgn3=de!PmJoY71P>waiD4iXKH>xtOzuaJwD z^}i>n`>sO^%uN4W&s@jPx|dytzK|HG`hlZ}2IYwbRXrp_ zB7<#OF+URsk7}~VP&71*+5~ni;teJiPg&ZePly4iAkmK?BOw6+#i73G&xU?JwCMWd z=x?nZbNrI~y^41${-!F?Gu=1U_v7c2){e<`h(5G9*X(7<@^dNs(@E~>w8cLC_|)St z#6@<+btq*ylw=Nl@xVygPSX-{)t`AvxoDa0p<@noN(Fx{|{#&T}C_F zaBvT52Nzg67?~#MoCY(51a?cv7ZT!nxEhslJs#DI6dW5}-l5fZi88bmQThg>>^SC8 zfIVr~Eya&bRx|bawSz6a|E2#0iAN{S?28;LIQ}5OO4PRK92cEZUN$ zOcN{9D**cw_>=k}f-P=&HSa$Lj`v^aj$%Z{BkeG(OOqbQQ3UarpD;yJ8Hg4xjv=-~ayipagKhI3;{z z0lOU@VhQO=Msh9&MIjOhgY}!RJwM3b;%iX-I>J>$1j}JGzZU;~e5PZ?SeXXaad75j zVl3f+6FqrWRh;wbW%j`SDK!2j#TnINFxcXRArv}nIV!lFQCvHYSTBKroWK=0e0)~X zlvtiosTRV*g0WpO+vOKpoBIOIvHi_QuIy20zheJZcU$k%?f%}bb6wrfb}7cpH$155 zL;;>6Mz3bhGH;zS8UHSEYy|4YKSh^O!8 z@F(>^gn~rztl_31S=zARTHsU8gR9OX%g!T9wJGNllbva2`K;rnBjwyV+4-Txop$d| zn9>&as-bT z+Tq@7jj4jIv!Z<#4A_Rot|WLH9-u<75nz?bZd@FW2Cu)(Xomyr5IWhSwI4!NbxX+E zEdD@CLe~CKNC^N-Lg|D)PZ5CI1_Hk!t_QZ4JyLN!(d_l#1=g3xp`#nnUrhy=Xxmm9LpbI^ySAEhkB0T6IukS%#sQlE{kfoer`zfc0SU4=vQNeCoH zr5GMPWiYM+a5(HAj7DFQ6ubI-<`07CI6OK6jK(Ypa^?jLzF4qSiR1aq#vu$#qwrvp zfssT!S^p0HBpxE*G>mDLaW6COnP=vDSD4z|_j7Y&D@U483tiD%xhtas;aZB>H@w2WT+lbc8WlD%y7I zvv`1Q)A&O0HGO7v0O{~HeP-<<(C1F*v+&56ZRV84uN^cPlkOYF#AXS1<2JozAz)0! 
z+pa_26@XsJTglg~MAot*vG`WN8^2+cSsRVyle32XLc5083YFvYxwa}=3jK1}Z%#|g zF~M!;Y1Y;YRs$WQ3==FEhJk(X);INF!7{|zYqVGqXL%dSxN!re5V|r| z)_lM0xkdM1mHn!0sp*dJ_o2I?fZCc>$~$UFu$gbJ_8LR%UspgzIJX!WL_!RvsSftwQB!z)&9lurS|ta-|f6Jl)Ml~ z9lx5Y3IhDlGFQIFxMtc{n2NNuXu5N%b7mlAt-f#FJ-=(RJzdnY5L*Z@?Myn3Po7B| z9W$fL#;SBh&3xUh=7q-PibqzdePU5Cb{P5*N_)Jl_NpXMU=?(Fpn^3;6PTc>HU6 z$Pz!XE_Wq!hU9Jf%;V46)znflilZk?41EPetqD`yRLD3zVb1j) zHy2!stU(%xfCorLVW=#T%aDq!d^rIxNrmvnvnJpPYfd`e20NFNOSJh;*y2`Tcx*x3 zwk(ttp#C&&+YWC$>p!#&ME>lNA#5}I-1xSEFli&w)(7^ZfF>Ykjrj%VRj>_j)`&dM z@Rm1>u%1PkgG|M(TE8%M{Sb&oTf-&@Yn+YS2z#=CZf*qvZy{es-Wr2<)CuD-8|N~4 zPWEWH0McA|K2VAluPp)eos1wu!}+{%0UiL1ajsvNHUBZ!HA5iee6}v}XMjP`qrQm* zT3o~kd87Cj82u6>~cIRr5vs0Q9r z{6yted=#U<#0c4#h^7TX(1|tJXID5)LxCp(bP+`*nXn6Rj8L62)!`ddjY=_G9Y@Vs zF+xgM9}CZw0uK+9#ETW4Bt_zGq4FY-WI^L5Y%z-tXC~oEA*?4GsXVBdxQr6anzyR> z9|870Hu{+GA8>bZl&;!$F57p``xY*w?E5EArw^U@t?lAM$DI;18Qp9Dp!Gtce|l(Y zXwH+e*UXnJ+Z&R$i<2D?f!V#Z`hT&E!|;){IBhAKK00-Drhmn<<1>~rRlad>?%G?i zTd{@u#a*eI$5NGtQrzLCl4b76bv-nhJiSgq0zNa`h#+w5wwF*v(^e zV<}f-f?Km!&&ho7v{^rCsT=>eaHQ<+7$6A1qltaBBI$sXP8uRd2ey@>2&_W>1{?teCP@ z&7E1X)@g4TkFQvFrz>_PI%W<|ox_3c-aBKQj!bY)(?SMZl)peM3O20;z1`(*?Z_O z9_V0siLc0P$?4t9U$`N<%bhswV>Z2qX0?aM;YiFAsD zZ=hMmnRA2ynUDn>&myUS*UNlu1x_D^oTdTni39dp3S_AOT<*0IS=VGy9&0qhI1HL` zC9aPXx%S4f!VHerp$j_nb1rUx{9%&+HV`(GAJt|OZd_uh8xstb$A-SvZA2rf#9*nI z`bR;rM5CE%^fjulJg)$1m8R^+91;;QccA;S;ToFrC7)R_1c|SaSc4ER5eP{D+X%4* z|1}o=TZ{<#x`(;nKtycr**l*RJl(-)I1C=)o3H(U2Jqiw6Qq)p6x`d|y2-Y*vjkrB z*nvE(61X|B%f7LR_6>eYq@FpsV!xxPBPVBHP51b4UQbW@mn~8`R3W2 zB0~(QnF9*Y*#JD(0FTz$4WH0~`e(>iXEOlz6u@y>!BEz|rj>wtW6R~WT<%Z&;v=&2f`L_m`kBNAW!)d|*l96d>kDAYqdAl|~ zP<_ciZPj6ib2D-j9u>^PN#;}(hdJduslXGA+v*#3qfzw@^A&f27h;|hv*OHrjaF z9AH^BEhzinBCIS!2;u=)D;ao~} z5P{j);+f%7mP#O-;A3!k>hgVC(;HpjO>A|d$H9H;3vcXttLawLTP?R*7OGPfk3-2{ zpM3A^yJz3)dbcZi?n>(T3zO|@w(_~U1@|w@-Y#3QJu=zxu>-ty-?{?c6^=cVXV$FV z`I3bakUFY=Rh_av1@NAR1#Yo!$-R1{Yxzi5%G3RQc&|$wP8%(+#eWcA<7_jQH%`s7 zub-Veo@_Y2)Vp;0&e3}V_bw%$c{XW(ZiTy)b~rya*%JJ8WGXUuapCX^kXlN|RxVy! 
def discover_sql_files(module_path: Path, module_name: str) -> Dict[str, List[str]]:
    """Scan a module checkout for SQL files grouped by target database.

    Looks under ``<module_path>/data/sql`` in the conventional AzerothCore
    layouts — the staged ``base/``, ``updates/`` and ``custom/`` directories —
    plus the legacy flat layout where db-type directories sit directly under
    ``data/sql``. Both underscore (``db_world``) and hyphen (``db-world``)
    directory names are recognised.

    Args:
        module_path: Root directory of the module checkout.
        module_name: Module name; currently unused, kept for interface
            stability (callers pass it and it is useful for future logging).

    Returns:
        Dict mapping canonical database key (``db_auth``, ``db_world``,
        ``db_characters``, ``db_playerbots``) to a list of SQL file paths
        (strings, relative to ``module_path``). Within each scanned
        directory files are listed in sorted order so import order is
        deterministic across filesystems. Databases with no SQL files are
        omitted from the result.
    """
    sql_files: Dict[str, List[str]] = {}
    sql_base = module_path / 'data' / 'sql'

    if not sql_base.exists():
        return sql_files

    # Map to support both underscore and hyphen naming conventions.
    db_types = {
        'db_auth': ['db_auth', 'db-auth'],
        'db_world': ['db_world', 'db-world'],
        'db_characters': ['db_characters', 'db-characters'],
        'db_playerbots': ['db_playerbots', 'db-playerbots'],
    }

    # Scan order is preserved from the original implementation:
    # base/, updates/, custom/, then the legacy flat layout (stage=None,
    # i.e. data/sql/<db-type>/ directly).
    stages = ('base', 'updates', 'custom', None)

    for canonical_name, variants in db_types.items():
        for stage in stages:
            for variant in variants:
                directory = (sql_base / stage / variant) if stage else (sql_base / variant)
                if not directory.exists():
                    continue
                # sorted() makes discovery deterministic; bare glob order
                # depends on the filesystem.
                for sql_file in sorted(directory.glob('*.sql')):
                    sql_files.setdefault(canonical_name, []).append(
                        str(sql_file.relative_to(module_path))
                    )

    return sql_files
def parse_args(argv: Sequence[str]) -> argparse.Namespace:
    """Parse command-line options for the manifest updater.

    Args:
        argv: Argument vector, excluding the program name.

    Returns:
        Parsed options. ``topics`` is ``None`` when no ``--topic`` flag was
        given; the caller substitutes ``DEFAULT_TOPICS`` (it checks
        ``args.topics or DEFAULT_TOPICS``, so ``None`` and ``[]`` behave
        identically).
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "--manifest",
        default="config/module-manifest.json",
        help="Path to manifest JSON file (default: %(default)s)",
    )
    # NOTE: default=None, not [] — with action="append" argparse appends to
    # the default list object itself, so a shared [] default silently
    # accumulates topics across repeated parse_args() calls in one process.
    parser.add_argument(
        "--topic",
        action="append",
        default=None,
        dest="topics",
        help="GitHub topic (or '+' separated topics) to scan. Defaults to core topics if not provided.",
    )
    parser.add_argument(
        "--token",
        help="GitHub API token (defaults to $GITHUB_TOKEN or $GITHUB_API_TOKEN)",
    )
    parser.add_argument(
        "--max-pages",
        type=int,
        default=10,
        help="Maximum pages (x100 results) to fetch per topic (default: %(default)s)",
    )
    parser.add_argument(
        "--refresh-existing",
        action="store_true",
        help="Refresh name/description/type for repos already present in manifest",
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Fetch and display the summary without writing to disk",
    )
    parser.add_argument(
        "--log",
        action="store_true",
        help="Print verbose progress information",
    )
    return parser.parse_args(argv)
+ f"q={parse.quote(query)}&per_page=100&page={page}&sort=updated&order=desc" + ) + data = self._request(url) + items = data.get("items", []) + if self.verbose: + print(f"Fetched {len(items)} repos for '{topic_expr}' (page {page})") + results.extend(items) + if len(items) < 100: + break + # Avoid secondary rate-limits. + time.sleep(0.5) + return results + + +def build_topic_query(expr: str) -> str: + parts = [part.strip() for part in expr.split("+") if part.strip()] + if not parts: + raise ValueError("Topic expression must contain at least one topic") + return "+".join(f"topic:{part}" for part in parts) + + +def guess_module_type(expr: str) -> str: + parts = [part.strip().lower() for part in expr.split("+") if part.strip()] + for part in parts: + hint = TOPIC_TYPE_HINTS.get(part) + if hint: + return hint + return "cpp" + + +def normalize_repo_url(url: str) -> str: + if url.endswith(".git"): + return url[:-4] + return url + + +def repo_name_to_key(name: str) -> str: + sanitized = re.sub(r"[^A-Za-z0-9]+", "_", name).strip("_") + sanitized = sanitized.upper() + if not sanitized: + sanitized = "MODULE_UNKNOWN" + if not sanitized.startswith("MODULE_"): + sanitized = f"MODULE_{sanitized}" + return sanitized + + +def load_manifest(path: str) -> Dict[str, List[dict]]: + manifest_path = os.path.abspath(path) + if not os.path.exists(manifest_path): + return {"modules": []} + try: + with open(manifest_path, "r", encoding="utf-8") as handle: + return json.load(handle) + except json.JSONDecodeError as exc: + raise RuntimeError(f"Unable to parse manifest {path}: {exc}") from exc + + +def ensure_defaults(entry: dict) -> None: + entry.setdefault("type", "cpp") + entry.setdefault("status", "active") + entry.setdefault("order", 5000) + entry.setdefault("requires", []) + entry.setdefault("post_install_hooks", []) + entry.setdefault("config_cleanup", []) + + +def update_entry_from_repo(entry: dict, repo: dict, repo_type: str, topic_expr: str, refresh: bool) -> None: + # Only overwrite 
descriptive fields when refresh is enabled or when they are missing. + if refresh or not entry.get("name"): + entry["name"] = repo.get("name") or entry.get("name") + if refresh or not entry.get("repo"): + entry["repo"] = repo.get("clone_url") or repo.get("html_url", entry.get("repo")) + if refresh or not entry.get("description"): + entry["description"] = repo.get("description") or entry.get("description", "") + if refresh or not entry.get("type"): + entry["type"] = repo_type + if refresh or not entry.get("category"): + entry["category"] = CATEGORY_BY_TYPE.get(repo_type, entry.get("category", "uncategorized")) + ensure_defaults(entry) + notes = entry.get("notes") or "" + tag_note = f"Discovered via GitHub topic '{topic_expr}'" + if tag_note not in notes: + entry["notes"] = (notes + " \n" + tag_note).strip() + + +def merge_repositories( + manifest: Dict[str, List[dict]], + repos: Iterable[RepoRecord], + refresh_existing: bool, +) -> tuple[int, int]: + modules = manifest.setdefault("modules", []) + by_key = {module.get("key"): module for module in modules if module.get("key")} + by_repo = { + normalize_repo_url(str(module.get("repo", ""))): module + for module in modules + if module.get("repo") + } + added = 0 + updated = 0 + + for record in repos: + repo = record.data + repo_url = normalize_repo_url(repo.get("clone_url") or repo.get("html_url") or "") + existing = by_repo.get(repo_url) + key = repo_name_to_key(repo.get("name", "")) + if not existing: + existing = by_key.get(key) + if not existing: + existing = { + "key": key, + "name": repo.get("name", key), + "repo": repo.get("clone_url") or repo.get("html_url", ""), + "description": repo.get("description") or "", + "type": record.module_type, + "category": CATEGORY_BY_TYPE.get(record.module_type, "uncategorized"), + "notes": "", + } + ensure_defaults(existing) + modules.append(existing) + by_key[key] = existing + if repo_url: + by_repo[repo_url] = existing + added += 1 + else: + updated += 1 + 
def collect_repositories(
    client: GitHubClient, topics: Sequence[str], max_pages: int
) -> List[RepoRecord]:
    """Query every topic expression and de-duplicate repositories.

    Repositories are keyed by their GitHub ``full_name``; the first record
    seen wins, but a later non-default type hint upgrades a ``cpp`` guess.
    """
    discovered: Dict[str, RepoRecord] = {}
    for topic_expr in topics:
        inferred_type = guess_module_type(topic_expr)
        for repo in client.search_repositories(topic_expr, max_pages):
            full_name = repo.get("full_name")
            if not full_name:
                continue
            existing = discovered.get(full_name)
            if existing is None:
                discovered[full_name] = RepoRecord(repo, topic_expr, inferred_type)
            elif existing.module_type == "cpp" and inferred_type != "cpp":
                # Prefer the most specific type (non-default) if available.
                existing.module_type = inferred_type
    return list(discovered.values())


def main(argv: Sequence[str]) -> int:
    """Entry point: discover topic repos and merge them into the manifest."""
    args = parse_args(argv)
    topics = args.topics or DEFAULT_TOPICS
    token = (
        args.token
        or os.environ.get("GITHUB_TOKEN")
        or os.environ.get("GITHUB_API_TOKEN")
    )
    client = GitHubClient(token, verbose=args.log)

    manifest = load_manifest(args.manifest)
    repos = collect_repositories(client, topics, args.max_pages)
    added, updated = merge_repositories(manifest, repos, args.refresh_existing)

    if args.dry_run:
        print(f"Discovered {len(repos)} repositories (added={added}, updated={updated})")
        return 0

    with open(args.manifest, "w", encoding="utf-8") as handle:
        json.dump(manifest, handle, indent=2)
        handle.write("\n")

    print(f"Updated manifest {args.manifest}: added {added}, refreshed {updated}")
    return 0


if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))