1 commit

Author: uprightbass360
SHA1: 3b11e23546
Message: refactor and compress code
Date: 2025-12-02 21:43:05 -05:00
49 changed files with 2679 additions and 3242 deletions

View File

@@ -14,7 +14,7 @@ COMPOSE_OVERRIDE_WORLDSERVER_DEBUG_LOGGING_ENABLED=0
# Project name
# =====================
# Customize this to match your deployment slug (used for container names/tags)
COMPOSE_PROJECT_NAME=azerothcore-realmmaster
COMPOSE_PROJECT_NAME=azerothcore-stack
# =====================
# Storage & Timezone
@@ -76,17 +76,10 @@ DB_GUARD_VERIFY_INTERVAL_SECONDS=86400
# =====================
STAGE_PATH_MODULE_SQL=${STORAGE_MODULE_SQL_PATH}
# =====================
# Modules rebuild source path
# =====================
# Default AzerothCore source checkout used for module rebuilds
MODULES_REBUILD_SOURCE_PATH=${STORAGE_PATH_LOCAL}/source/azerothcore
# =====================
# SQL Source Overlay
# =====================
SOURCE_DIR=${MODULES_REBUILD_SOURCE_PATH}
AC_SQL_SOURCE_PATH=${MODULES_REBUILD_SOURCE_PATH}/data/sql
AC_SQL_SOURCE_PATH=${STORAGE_LOCAL_SOURCE_PATH}/azerothcore-playerbots/data/sql
# =====================
# Images
@@ -96,15 +89,15 @@ AC_DB_IMPORT_IMAGE=acore/ac-wotlk-db-import:master
AC_AUTHSERVER_IMAGE=acore/ac-wotlk-authserver:master
AC_WORLDSERVER_IMAGE=acore/ac-wotlk-worldserver:master
# Services (Playerbots)
AC_AUTHSERVER_IMAGE_PLAYERBOTS=${COMPOSE_PROJECT_NAME}:authserver-playerbots
AC_WORLDSERVER_IMAGE_PLAYERBOTS=${COMPOSE_PROJECT_NAME}:worldserver-playerbots
AC_AUTHSERVER_IMAGE_PLAYERBOTS=azerothcore-realmmaster:authserver-playerbots
AC_WORLDSERVER_IMAGE_PLAYERBOTS=azerothcore-realmmaster:worldserver-playerbots
# Services (Module Build Tags)
# Images used during module compilation and tagging
AC_AUTHSERVER_IMAGE_MODULES=${COMPOSE_PROJECT_NAME}:authserver-modules-latest
AC_WORLDSERVER_IMAGE_MODULES=${COMPOSE_PROJECT_NAME}:worldserver-modules-latest
AC_AUTHSERVER_IMAGE_MODULES=azerothcore-realmmaster:authserver-modules-latest
AC_WORLDSERVER_IMAGE_MODULES=azerothcore-realmmaster:worldserver-modules-latest
# Client Data
AC_CLIENT_DATA_IMAGE=acore/ac-wotlk-client-data:master
AC_CLIENT_DATA_IMAGE_PLAYERBOTS=${COMPOSE_PROJECT_NAME}:client-data-playerbots
AC_CLIENT_DATA_IMAGE_PLAYERBOTS=uprightbass360/azerothcore-wotlk-playerbots:client-data-Playerbot
# Build artifacts
DOCKER_IMAGE_TAG=master
AC_AUTHSERVER_IMAGE_BASE=acore/ac-wotlk-authserver
@@ -226,8 +219,6 @@ MODULES_REQUIRES_PLAYERBOT_SOURCE=0
# Only set this if you need to override the auto-detected version
# Example: v18.0, v17.0, etc.
CLIENT_DATA_VERSION=
# Client data path for deployment (auto-calculated when left blank)
CLIENT_DATA_PATH=
# =====================
# Server Configuration
@@ -237,10 +228,174 @@ CLIENT_DATA_PATH=
SERVER_CONFIG_PRESET=none
CLIENT_DATA_CACHE_PATH=${STORAGE_PATH_LOCAL}/client-data-cache
# =====================
# Module toggles (0/1)
# =====================
# Enable/disable modules by setting to 1 (enabled) or 0 (disabled)
# Modules are organized by category for easier navigation
# 🤖 Automation
# Playerbot and AI systems
MODULE_NPCBOT_EXTENDED_COMMANDS=0
MODULE_OLLAMA_CHAT=0
# mod-playerbots: Installs SQL/config assets; core functionality is built into playerbot images
MODULE_PLAYERBOTS=0
MODULE_PLAYER_BOT_LEVEL_BRACKETS=0
# ✨ Quality of Life
# Convenience features that improve gameplay experience
MODULE_AOE_LOOT=0
MODULE_AUTO_REVIVE=0
MODULE_FIREWORKS=0
MODULE_INSTANCE_RESET=0
MODULE_LEARN_SPELLS=0
MODULE_SOLO_LFG=0
# ⚔️ Gameplay Enhancement
# Core gameplay improvements and mechanics
MODULE_AUTOBALANCE=0
MODULE_CHALLENGE_MODES=0
MODULE_DUEL_RESET=0
MODULE_DUNGEON_RESPAWN=0
MODULE_HARDCORE_MODE=0
MODULE_HORADRIC_CUBE=0
MODULE_SOLOCRAFT=0
MODULE_STATBOOSTER=0
MODULE_TIME_IS_TIME=0
# 🏪 NPC Services
# Service NPCs that provide player utilities
MODULE_ASSISTANT=0
MODULE_MULTIVENDOR=0
MODULE_NPC_BEASTMASTER=0
MODULE_NPC_BUFFER=0
MODULE_NPC_ENCHANTER=0
MODULE_NPC_FREE_PROFESSIONS=0
# mod-npc-talent-template: Admin commands: .templatenpc create [TemplateName] and .templatenpc reload
MODULE_NPC_TALENT_TEMPLATE=0
MODULE_REAGENT_BANK=0
MODULE_TRANSMOG=0
# ⚡ PvP
# Player vs Player focused modules
MODULE_1V1_ARENA=0
# mod-arena-replay: NPC ID: 98500; known issue: players who were participants experience unusual behavior when watching their own replay
MODULE_ARENA_REPLAY=0
MODULE_GAIN_HONOR_GUARD=0
MODULE_PHASED_DUELS=0
MODULE_PVP_TITLES=0
MODULE_ULTIMATE_FULL_LOOT_PVP=0
# 📈 Progression
# Character and server progression systems
MODULE_DYNAMIC_XP=0
MODULE_INDIVIDUAL_PROGRESSION=0
MODULE_ITEM_LEVEL_UP=0
MODULE_LEVEL_GRANT=0
# mod-progression-system: SQL files cannot be unloaded once executed; requires auto DB updater enabled in worldserver config
MODULE_PROGRESSION_SYSTEM=0
MODULE_PROMOTION_AZEROTHCORE=0
MODULE_WEEKEND_XP=0
# mod-zone-difficulty: Mythicmode NPC 1128001 spawned in raids/heroic dungeons; NPC 1128002 for Mythicmode rewards
MODULE_ZONE_DIFFICULTY=0
# 💰 Economy
# Auction house, trading, and economic systems
MODULE_AHBOT=0
MODULE_BLACK_MARKET_AUCTION_HOUSE=0
MODULE_DYNAMIC_TRADER=0
MODULE_EXCHANGE_NPC=0
MODULE_GLOBAL_MAIL_BANKING_AUCTIONS=0
MODULE_LOTTERY_LUA=0
MODULE_LUA_AH_BOT=0
MODULE_RANDOM_ENCHANTS=0
# 👥 Social
# Social and community features
MODULE_ACTIVE_CHAT=0
MODULE_BOSS_ANNOUNCER=0
MODULE_BREAKING_NEWS=0
MODULE_DISCORD_NOTIFIER=0
MODULE_GLOBAL_CHAT=0
MODULE_TEMP_ANNOUNCEMENTS=0
# 👤 Account-Wide
# Features that apply across all characters on an account
MODULE_ACCOUNTWIDE_SYSTEMS=0
MODULE_ACCOUNT_ACHIEVEMENTS=0
MODULE_ACCOUNT_MOUNTS=0
# 🎨 Customization
# Character and appearance customization
MODULE_ARAC=0
# mod-morphsummon: Allows customization of summoned creature appearances (Warlock demons, Death Knight ghouls, Mage water elementals); NPC ID: 601072
MODULE_MORPHSUMMON=0
MODULE_TRANSMOG_AIO=0
MODULE_WORGOBLIN=0
# 📜 Scripting
# Lua/Eluna scripting frameworks and tools
# mod-aio: Azeroth Interface Override - enables client-server interface communication
MODULE_AIO=0
MODULE_ELUNA=1
MODULE_ELUNA_SCRIPTS=0
MODULE_ELUNA_TS=0
MODULE_EVENT_SCRIPTS=0
# 🔧 Admin Tools
# Server administration and management utilities
MODULE_ANTIFARMING=0
MODULE_CARBON_COPY=0
# mod-keep-out: Requires editing database table mod_mko_map_lock; use .gps command to obtain map and zone IDs
MODULE_KEEP_OUT=0
MODULE_SEND_AND_BIND=0
MODULE_SERVER_AUTO_SHUTDOWN=0
# mod-spell-regulator: WARNING: Custom code changes mandatory before module functions; requires custom hooks from external gist
MODULE_SPELL_REGULATOR=0
MODULE_WHO_LOGGED=0
MODULE_ZONE_CHECK=0
# 💎 Premium/VIP
# Premium account and VIP systems
MODULE_ACORE_SUBSCRIPTIONS=0
# mod-premium: Script must be assigned to an item (like hearthstone) using script name 'premium_account'
MODULE_PREMIUM=0
MODULE_SYSTEM_VIP=0
# 🎮 Mini-Games
# Fun and entertainment features
MODULE_AIO_BLACKJACK=0
MODULE_POCKET_PORTAL=0
# mod-tic-tac-toe: NPC ID: 100155
MODULE_TIC_TAC_TOE=0
# 🏰 Content
# Additional game content and features
MODULE_AZEROTHSHARD=0
MODULE_BG_SLAVERYVALLEY=0
MODULE_GUILDHOUSE=0
MODULE_TREASURE_CHEST_SYSTEM=0
MODULE_WAR_EFFORT=0
# 🎁 Rewards
# Player reward and incentive systems
MODULE_LEVEL_UP_REWARD=0
MODULE_PRESTIGE_DRAFT_MODE=0
MODULE_RECRUIT_A_FRIEND=0
# mod-resurrection-scroll: Requires EnablePlayerSettings to be enabled in worldserver config file
MODULE_RESURRECTION_SCROLL=0
MODULE_REWARD_PLAYED_TIME=0
# 🛠️ Developer Tools
# Development and testing utilities
MODULE_SKELETON_MODULE=0
# =====================
# Rebuild automation
# =====================
AUTO_REBUILD_ON_DEPLOY=0
# Default AzerothCore source checkout used for module rebuilds
MODULES_REBUILD_SOURCE_PATH=${STORAGE_PATH_LOCAL}/source/azerothcore
# =====================
# Source repositories
@@ -297,111 +452,39 @@ KEIRA_DATABASE_HOST=ac-mysql
KEIRA_DATABASE_PORT=3306
# Auto-generated defaults for new modules
MODULE_NPCBOT_EXTENDED_COMMANDS=0
MODULE_OLLAMA_CHAT=0
MODULE_PLAYERBOTS=0
MODULE_PLAYER_BOT_LEVEL_BRACKETS=0
MODULE_AOE_LOOT=0
MODULE_AUTO_REVIVE=0
MODULE_FIREWORKS=0
MODULE_INSTANCE_RESET=0
MODULE_LEARN_SPELLS=0
MODULE_SOLO_LFG=0
MODULE_AUTOBALANCE=0
MODULE_DUEL_RESET=0
MODULE_HARDCORE_MODE=0
MODULE_HORADRIC_CUBE=0
MODULE_SOLOCRAFT=0
MODULE_TIME_IS_TIME=0
MODULE_ASSISTANT=0
MODULE_NPC_BEASTMASTER=0
MODULE_NPC_BUFFER=0
MODULE_NPC_ENCHANTER=0
MODULE_NPC_FREE_PROFESSIONS=0
MODULE_NPC_TALENT_TEMPLATE=0
MODULE_REAGENT_BANK=0
MODULE_TRANSMOG=0
MODULE_1V1_ARENA=0
MODULE_ARENA_REPLAY=0
MODULE_GAIN_HONOR_GUARD=0
MODULE_PHASED_DUELS=0
MODULE_PVP_TITLES=0
MODULE_ULTIMATE_FULL_LOOT_PVP=0
MODULE_DYNAMIC_XP=0
MODULE_INDIVIDUAL_PROGRESSION=0
MODULE_ITEM_LEVEL_UP=0
MODULE_PROGRESSION_SYSTEM=0
MODULE_PROMOTION_AZEROTHCORE=0
MODULE_WEEKEND_XP=0
MODULE_ZONE_DIFFICULTY=0
MODULE_DYNAMIC_TRADER=0
MODULE_EXCHANGE_NPC=0
MODULE_GLOBAL_MAIL_BANKING_AUCTIONS=0
MODULE_LOTTERY_LUA=0
MODULE_LUA_AH_BOT=0
MODULE_RANDOM_ENCHANTS=0
MODULE_ACTIVE_CHAT=0
MODULE_BOSS_ANNOUNCER=0
MODULE_BREAKING_NEWS=0
MODULE_DISCORD_NOTIFIER=0
MODULE_GLOBAL_CHAT=0
MODULE_TEMP_ANNOUNCEMENTS=0
MODULE_ACCOUNTWIDE_SYSTEMS=0
MODULE_ACCOUNT_ACHIEVEMENTS=0
MODULE_ACCOUNT_MOUNTS=0
MODULE_ARAC=0
MODULE_MORPHSUMMON=0
MODULE_TRANSMOG_AIO=0
MODULE_WORGOBLIN=0
MODULE_AIO=0
MODULE_ELUNA=1
MODULE_ELUNA_SCRIPTS=0
MODULE_ELUNA_TS=0
MODULE_EVENT_SCRIPTS=0
MODULE_ANTIFARMING=0
MODULE_CARBON_COPY=0
MODULE_KEEP_OUT=0
MODULE_SEND_AND_BIND=0
MODULE_SERVER_AUTO_SHUTDOWN=0
MODULE_SPELL_REGULATOR=0
MODULE_WHO_LOGGED=0
MODULE_ZONE_CHECK=0
MODULE_PREMIUM=0
MODULE_SYSTEM_VIP=0
MODULE_AIO_BLACKJACK=0
MODULE_TIC_TAC_TOE=0
MODULE_BG_SLAVERYVALLEY=0
MODULE_GUILDHOUSE=0
MODULE_TREASURE_CHEST_SYSTEM=0
MODULE_WAR_EFFORT=0
MODULE_LEVEL_UP_REWARD=0
MODULE_PRESTIGE_DRAFT_MODE=0
MODULE_RECRUIT_A_FRIEND=0
MODULE_RESURRECTION_SCROLL=0
MODULE_REWARD_PLAYED_TIME=0
MODULE_SKELETON_MODULE=0
MODULE_1V1_PVP_SYSTEM=0
MODULE_ACI=0
MODULE_ACORE_API=0
MODULE_ACORE_BG_END_ANNOUNCER=0
MODULE_ACORE_BOX=0
MODULE_ACORE_CLIENT=0
MODULE_ACORE_CMS=0
MODULE_ACORE_ELUNATEST=0
MODULE_ACORE_LINUX_RESTARTER=0
MODULE_ACORE_LUA_UNLIMITED_AMMO=0
MODULE_ACORE_LXD_IMAGE=0
MODULE_ACORE_MALL=0
MODULE_ACORE_MINI_REG_PAGE=0
MODULE_ACORE_NODE_SERVER=0
MODULE_ACORE_PWA=0
MODULE_ACORE_SOD=0
MODULE_ACORE_SUMMONALL=0
MODULE_ACORE_TILEMAP=0
MODULE_ACORE_ZONEDEBUFF=0
MODULE_ACREBUILD=0
MODULE_ADDON_FACTION_FREE_UNIT_POPUP=0
MODULE_AOE_LOOT_MERGE=0
MODULE_APAW=0
MODULE_ARENA_SPECTATOR=0
MODULE_ARENA_STATS=0
MODULE_ATTRIBOOST=0
MODULE_AUTO_CHECK_RESTART=0
MODULE_AZEROTHCOREADMIN=0
MODULE_AZEROTHCOREDISCORDBOT=0
MODULE_AZEROTHCORE_ADDITIONS=0
MODULE_AZEROTHCORE_ALL_STACKABLES_200=0
MODULE_AZEROTHCORE_ANSIBLE=0
MODULE_AZEROTHCORE_ARMORY=0
MODULE_AZEROTHCORE_LUA_ARENA_MASTER_COMMAND=0
MODULE_AZEROTHCORE_LUA_DEMON_MORPHER=0
MODULE_AZEROTHCORE_PASSRESET=0
@@ -411,25 +494,41 @@ MODULE_AZEROTHCORE_TRIVIA_SYSTEM=0
MODULE_AZEROTHCORE_WEBSITE=0
MODULE_AZEROTHCORE_WOWHEAD_MOD_LUA=0
MODULE_AZTRAL_AIRLINES=0
MODULE_BGQUEUECHECKER=0
MODULE_BG_QUEUE_ABUSER_VIEWER=0
MODULE_BLIZZLIKE_TELES=0
MODULE_BREAKINGNEWSOVERRIDE=0
MODULE_CLASSIC_MODE=0
MODULE_CODEBASE=0
MODULE_CONFIG_RATES=0
MODULE_DEVJOESTAR=0
MODULE_ELUNA_WOW_SCRIPTS=0
MODULE_EXTENDEDXP=0
MODULE_EXTENDED_HOLIDAYS_LUA=0
MODULE_FFAFIX=0
MODULE_FLAG_CHECKER=0
MODULE_GUILDBANKTABFEEFIXER=0
MODULE_HARDMODE=0
MODULE_HEARTHSTONE_COOLDOWNS=0
MODULE_ITEMBROADCASTGUILDCHAT=0
MODULE_KARGATUM_SYSTEM=0
MODULE_KEIRA3=0
MODULE_LOTTERY_CHANCE_INSTANT=0
MODULE_LUA_AIO_MODRATE_EXP=0
MODULE_LUA_COMMAND_PLUS=0
MODULE_LUA_ITEMUPGRADER_TEMPLATE=0
MODULE_LUA_NOTONLY_RANDOMMORPHER=0
MODULE_LUA_PARAGON_ANNIVERSARY=0
MODULE_LUA_PVP_TITLES_RANKING_SYSTEM=0
MODULE_LUA_SCRIPTS=0
MODULE_LUA_SUPER_BUFFERNPC=0
MODULE_LUA_VIP=0
MODULE_MOD_ACCOUNTBOUND=0
MODULE_MOD_ACCOUNT_VANITY_PETS=0
MODULE_MOD_ACTIVATEZONES=0
MODULE_MOD_AH_BOT_PLUS=0
MODULE_MOD_ALPHA_REWARDS=0
MODULE_MOD_AOE_LOOT=0
MODULE_MOD_APPRECIATION=0
MODULE_MOD_ARENA_TIGERSPEAK=0
MODULE_MOD_ARENA_TOLVIRON=0
@@ -440,29 +539,44 @@ MODULE_MOD_BG_ITEM_REWARD=0
MODULE_MOD_BG_REWARD=0
MODULE_MOD_BG_TWINPEAKS=0
MODULE_MOD_BIENVENIDA=0
MODULE_MOD_BLACK_MARKET=0
MODULE_MOD_BRAWLERS_GUILD=0
MODULE_MOD_BUFF_COMMAND=0
MODULE_MOD_CFPVE=0
MODULE_MOD_CHANGEABLESPAWNRATES=0
MODULE_MOD_CHARACTER_SERVICES=0
MODULE_MOD_CHARACTER_TOOLS=0
MODULE_MOD_CHAT_TRANSMITTER=0
MODULE_MOD_CHROMIE_XP=0
MODULE_MOD_CONGRATS_ON_LEVEL=0
MODULE_MOD_COSTUMES=0
MODULE_MOD_CRAFTSPEED=0
MODULE_MOD_CTA_SWITCH=0
MODULE_MOD_DEAD_MEANS_DEAD=0
MODULE_MOD_DEATHROLL_AIO=0
MODULE_MOD_DEMONIC_PACT_CLASSIC=0
MODULE_MOD_DESERTION_WARNINGS=0
MODULE_MOD_DISCORD_ANNOUNCE=0
MODULE_MOD_DISCORD_WEBHOOK=0
MODULE_MOD_DMF_SWITCH=0
MODULE_MOD_DUNGEONMASTER=0
MODULE_MOD_DUNGEON_SCALE=0
MODULE_MOD_DYNAMIC_LOOT_RATES=0
MODULE_MOD_DYNAMIC_RESURRECTIONS=0
MODULE_MOD_ENCOUNTER_LOGS=0
MODULE_MOD_FACTION_FREE=0
MODULE_MOD_FIRSTLOGIN_AIO=0
MODULE_MOD_FLIGHTMASTER_WHISTLE=0
MODULE_MOD_FORTIS_AUTOBALANCE=0
MODULE_MOD_GAME_STATE_API=0
MODULE_MOD_GEDDON_BINDING_SHARD=0
MODULE_MOD_GHOST_SPEED=0
MODULE_MOD_GLOBALCHAT=0
MODULE_MOD_GM_COMMANDS=0
MODULE_MOD_GOMOVE=0
MODULE_MOD_GROWNUP=0
MODULE_MOD_GUILDFUNDS=0
MODULE_MOD_GUILD_VILLAGE=0
MODULE_MOD_GUILD_ZONE_SYSTEM=0
MODULE_MOD_HARDCORE=0
MODULE_MOD_HARDCORE_MAKGORA=0
@@ -471,21 +585,32 @@ MODULE_MOD_HIGH_RISK_SYSTEM=0
MODULE_MOD_HUNTER_PET_STORAGE=0
MODULE_MOD_IMPROVED_BANK=0
MODULE_MOD_INCREMENT_CACHE_VERSION=0
MODULE_MOD_INDIVIDUAL_XP=0
MODULE_MOD_INFLUXDB=0
MODULE_MOD_INSTANCE_TOOLS=0
MODULE_MOD_IP2NATION=0
MODULE_MOD_IP_TRACKER=0
MODULE_MOD_ITEMLEVEL=0
MODULE_MOD_ITEM_UPGRADE=0
MODULE_MOD_JUNK_TO_GOLD=0
MODULE_MOD_LEARNSPELLS=0
MODULE_MOD_LEECH=0
MODULE_MOD_LEVEL_15_BOOST=0
MODULE_MOD_LEVEL_ONE_MOUNTS=0
MODULE_MOD_LEVEL_REWARDS=0
MODULE_MOD_LOGIN_REWARDS=0
MODULE_MOD_LOW_LEVEL_ARENA=0
MODULE_MOD_LOW_LEVEL_RBG=0
MODULE_MOD_MISSING_OBJECTIVES=0
MODULE_MOD_MONEY_FOR_KILLS=0
MODULE_MOD_MOUNTS_ON_ACCOUNT=0
MODULE_MOD_MOUNT_REQUIREMENTS=0
MODULE_MOD_MULTI_VENDOR=0
MODULE_MOD_MYTHIC_PLUS=0
MODULE_MOD_NOCLIP=0
MODULE_MOD_NORDF=0
MODULE_MOD_NOTIFY_MUTED=0
MODULE_MOD_NO_FARMING=0
MODULE_MOD_NO_HEARTHSTONE_COOLDOWN=0
MODULE_MOD_NPC_ALL_MOUNTS=0
MODULE_MOD_NPC_CODEBOX=0
@@ -495,64 +620,90 @@ MODULE_MOD_NPC_PROMOTION=0
MODULE_MOD_NPC_SERVICES=0
MODULE_MOD_NPC_SPECTATOR=0
MODULE_MOD_NPC_SUBCLASS=0
MODULE_MOD_OBJSCALE=0
MODULE_MOD_OLLAMA_BOT_BUDDY=0
MODULE_MOD_ONY_NAXX_LOGOUT_TELEPORT=0
MODULE_MOD_PEACEKEEPER=0
MODULE_MOD_PETEQUIP=0
MODULE_MOD_PREMIUM=0
MODULE_MOD_PREMIUM_LIB=0
MODULE_MOD_PROFESSION_EXPERIENCE=0
MODULE_MOD_PROFSPECS=0
MODULE_MOD_PTR_TEMPLATE=0
MODULE_MOD_PVPSCRIPT=0
MODULE_MOD_PVPSTATS_ANNOUNCER=0
MODULE_MOD_PVP_ZONES=0
MODULE_MOD_QUEST_LOOT_PARTY=0
MODULE_MOD_QUEST_STATUS=0
MODULE_MOD_QUEUE_LIST_CACHE=0
MODULE_MOD_QUICKBALANCE=0
MODULE_MOD_QUICK_RESPAWN=0
MODULE_MOD_RACIAL_TRAIT_SWAP=0
MODULE_MOD_RARE_DROPS=0
MODULE_MOD_RDF_EXPANSION=0
MODULE_MOD_REAL_ONLINE=0
MODULE_MOD_RECRUIT_FRIEND=0
MODULE_MOD_REFORGING=0
MODULE_MOD_RESET_RAID_COOLDOWNS=0
MODULE_MOD_REWARD_PLAYED_TIME_IMPROVED=0
MODULE_MOD_REWARD_SHOP=0
MODULE_MOD_SELL_ITEMS=0
MODULE_MOD_SETXPBAR=0
MODULE_MOD_SHARE_MOUNTS=0
MODULE_MOD_SPAWNPOINTS=0
MODULE_MOD_SPEC_REWARD=0
MODULE_MOD_SPELLREGULATOR=0
MODULE_MOD_SPONSORSHIP=0
MODULE_MOD_STARTER_GUILD=0
MODULE_MOD_STARTER_WANDS=0
MODULE_MOD_STARTING_PET=0
MODULE_MOD_STREAMS=0
MODULE_MOD_SWIFT_TRAVEL_FORM=0
MODULE_MOD_TALENTBUTTON=0
MODULE_MOD_TRADE_ITEMS_FILTER=0
MODULE_MOD_TREASURE=0
MODULE_MOD_TRIAL_OF_FINALITY=0
MODULE_MOD_VANILLA_NAXXRAMAS=0
MODULE_MOD_WARLOCK_PET_RENAME=0
MODULE_MOD_WEAPON_VISUAL=0
MODULE_MOD_WEEKENDBONUS=0
MODULE_MOD_WEEKEND_XP=0
MODULE_MOD_WHOLOGGED=0
MODULE_MORZA_ISLAND_ARAXIA_SERVER=0
MODULE_MPQ_TOOLS_OSX=0
MODULE_MYSQL_TOOLS=0
MODULE_NODEROUTER=0
MODULE_OPENPROJECTS=0
MODULE_PLAYERTELEPORT=0
MODULE_PORTALS_IN_ALL_CAPITALS=0
MODULE_PRESTIGE=0
MODULE_PRESTIGIOUS=0
MODULE_PVPSTATS=0
MODULE_RAIDTELEPORTER=0
MODULE_RECACHE=0
MODULE_RECYCLEDITEMS=0
MODULE_REWARD_SYSTEM=0
MODULE_SAHTOUTCMS=0
MODULE_SERVER_STATUS=0
MODULE_SETXPBAR=0
MODULE_SPELLSCRIPT_REFACTOR_TOOL=0
MODULE_SQL_NPC_TELEPORTER=0
MODULE_STATBOOSTERREROLLER=0
MODULE_STRAPI_AZEROTHCORE=0
MODULE_TBC_RAID_HP_RESTORATION=0
MODULE_TELEGRAM_AUTOMATED_DB_BACKUP=0
MODULE_TOOL_TC_MIGRATION=0
MODULE_TRANSMOG_ADDONS=0
MODULE_UPDATE_MOB_LEVEL_TO_PLAYER_AND_RANDOM_ITEM_STATS=0
MODULE_UPDATE_MODULE_CONFS=0
MODULE_WEB_CHARACTER_MIGRATION_TOOL=0
MODULE_WEEKLY_ARMOR_VENDOR_BLACK_MARKET=0
MODULE_WORLD_BOSS_RANK=0
MODULE_WOWDATABASEEDITOR=0
MODULE_WOWLAUNCHER_DELPHI=0
MODULE_WOWSIMS_TO_COMMANDS=0
MODULE_WOW_CLIENT_PATCHER=0
MODULE_WOW_ELUNA_TS_MODULE=0
MODULE_WOW_SERVER_RELAY=0
MODULE_WOW_STATISTICS=0
MODULE_WRATH_OF_THE_VANILLA=0
MODULE_MOD_BOTS_LOGIN_FIX=0
MODULE_MOD_MATERIAL_BANK=0
MODULE_MOD_PROGRESSION_BLIZZLIKE=0
MODULE_MOD_PYTHON_ENGINE=0
MODULE_WRATH_OF_THE_VANILLA_V2=0

View File

@@ -17,31 +17,13 @@ jobs:
with:
python-version: '3.11'
- name: Configure git
run: |
git config --global user.name 'github-actions[bot]'
git config --global user.email 'github-actions[bot]@users.noreply.github.com'
- name: Update manifest from GitHub topics
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
python3 scripts/python/update_module_manifest.py --log
- name: Check for changes
id: changes
run: |
if git diff --quiet; then
echo "changed=false" >> $GITHUB_OUTPUT
echo "No changes detected in manifest or template files"
else
echo "changed=true" >> $GITHUB_OUTPUT
echo "Changes detected:"
git diff --name-only
fi
- name: Create Pull Request with changes
if: steps.changes.outputs.changed == 'true'
uses: peter-evans/create-pull-request@v5
with:
commit-message: 'chore: sync module manifest'

.gitignore (vendored): 2 lines changed
View File

@@ -2,7 +2,6 @@ database-import/*.sql
database-import/*.sql.gz
database-import/*/
database-import/ImportBackup*/
db_*/
source/*
local-data-tools/
changelogs/
@@ -13,7 +12,6 @@ images/
node_modules/
.mcp*/
scripts/__pycache__/*
scripts/bash/__pycache__/*
scripts/python/__pycache__/*
.env
package-lock.json

CHANGELOG.md (new file): 87 lines added
View File

@@ -0,0 +1,87 @@
# Changelog
## [2025-11-09] - Recent Changes
### ✨ Features
#### Backup System Enhancements
- **Manual Backup Support**: Added `manual-backup.sh` script (92 lines) enabling on-demand database backups through the ac-backup container
- **Backup Permission Fixes**: Resolved Docker volume permission issues with backup operations
- **Container User Configuration**: Backup operations now run as proper container user to avoid permission conflicts
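
For reference, an on-demand backup might be triggered roughly as sketched below; the host path under `scripts/bash/` and the in-container path are assumptions based on the script listings elsewhere in this changeset, not documented invocations.

```bash
# Hypothetical host-side invocation (script path assumed, no flags documented here)
./scripts/bash/manual-backup.sh

# Or, if the script is only reachable inside the ac-backup container (assumption):
docker exec ac-backup /scripts/manual-backup.sh
```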
#### Remote Deployment
- **Auto Deploy Option**: Added remote auto-deployment functionality to `deploy.sh` (36 additional lines) for automated server provisioning
#### Configuration Management System
- **Database/Config Import**: Major new feature with 1,405+ lines of code across 15 files
- Added `apply-config.py` (323 lines) for dynamic server configuration
- Created `configure-server.sh` (162 lines) for server setup automation
- Implemented `import-database-files.sh` (68 lines) for database initialization
- Added `parse-config-presets.py` (92 lines) for configuration templating
- **Configuration Presets**: 5 new server preset configurations
- `blizzlike.conf` - Authentic Blizzard-like experience
- `casual-pve.conf` - Relaxed PvE gameplay
- `fast-leveling.conf` - Accelerated character progression
- `hardcore-pvp.conf` - Competitive PvP settings
- `none.conf` - Minimal configuration baseline
- **Dynamic Server Overrides**: `server-overrides.conf` (134 lines) for customizable server parameters
- **Comprehensive Config Documentation**: `CONFIG_MANAGEMENT.md` (279 lines) detailing the entire configuration system
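
As a rough sketch, selecting one of the presets listed above presumably goes through the `SERVER_CONFIG_PRESET` variable seen in `.env.template`; the exact redeploy step is an assumption.

```bash
# .env sketch: point SERVER_CONFIG_PRESET at one of the presets in config/presets/
SERVER_CONFIG_PRESET=blizzlike   # assumption: the variable takes the preset file's basename

# Then redeploy so configure-server.sh / apply-config.py pick up the new preset (assumed flow)
./deploy.sh
```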
#### Infrastructure Improvements
- **MySQL Exposure Toggle**: Optional MySQL port exposure for external database access
- **Client Data Management**: Automatic client data detection, download, and binding with version detection
- **Dynamic Docker Overrides**: Flexible compose override system for modular container configurations
- **Module Profile System**: Structured module management with preset profiles
### 🏗️ Refactoring
#### Script Organization
- **Directory Restructure**: Reorganized all scripts into `scripts/bash/` and `scripts/python/` directories (40 files moved/modified)
- **Project Naming**: Added centralized project name management with `project_name.sh`
- **Module Manifest Rename**: Moved `modules.json` → `module-manifest.json` for clarity
### 🐛 Bug Fixes
#### Container Improvements
- **Client Data Container**: Enhanced with 7zip support, root access during extraction, and ownership fixes
- **Permission Resolution**: Fixed file ownership issues in client data extraction process
- **Path Updates**: Corrected deployment paths and script references after reorganization
### 📚 Documentation
#### Major Documentation Overhaul
- **Modular Documentation**: Split massive README into focused documents (1,500+ lines reorganized)
- `docs/GETTING_STARTED.md` (467 lines) - Setup and initial configuration
- `docs/MODULES.md` (264 lines) - Module management and customization
- `docs/SCRIPTS.md` (404 lines) - Script reference and automation
- `docs/ADVANCED.md` (207 lines) - Advanced configuration topics
- `docs/TROUBLESHOOTING.md` (127 lines) - Common issues and solutions
- **README Streamlining**: Reduced main README from 1,200+ lines to a focused overview
- **Script Documentation**: Updated script references and usage examples throughout
### 🔧 Technical Changes
#### Development Experience
- **Setup Enhancements**: Improved `setup.sh` with better error handling and configuration options (66 lines added)
- **Status Monitoring**: Enhanced `status.sh` with better container and service monitoring
- **Build Process**: Updated build scripts with new directory structure and module handling
- **Cleanup Operations**: Improved cleanup scripts with proper path handling
#### DevOps & Deployment
- **Remote Cleanup**: Enhanced remote server cleanup and temporary file management
- **Network Binding**: Improved container networking and port management
- **Import Folder**: Added dedicated import directory structure
- **Development Onboarding**: Streamlined developer setup process
---
### Migration Notes
- Scripts have moved from `scripts/` to `scripts/bash/` and `scripts/python/`
- Module configuration is now in `config/module-manifest.json`
- New environment variables added for MySQL exposure and client data management
- Configuration presets are available in `config/presets/`
### Breaking Changes
- Script paths have changed due to reorganization
- Module manifest file has been renamed
- Some environment variables have been added/modified

CLEANUP_TODO.md (new file): 372 lines added
View File

@@ -0,0 +1,372 @@
# AzerothCore RealmMaster - Cleanup TODO
## Overview
This document outlines systematic cleanup opportunities using the proven methodology from our successful consolidation. Each phase must be validated and tested incrementally without breaking existing functionality.
## Methodology
1. **Analyze** - Map dependencies and usage patterns
2. **Consolidate** - Create shared libraries/templates
3. **Replace** - Update scripts to use centralized versions
4. **Test** - Validate each change incrementally
5. **Document** - Track changes and dependencies
---
## Phase 1: Complete Script Function Consolidation
**Priority: HIGH** | **Risk: LOW** | **Impact: HIGH**
### Status
✅ **Completed**: Master scripts (deploy.sh, build.sh, cleanup.sh) + 4 critical scripts
🔄 **Remaining**: 10+ scripts with duplicate logging functions
### Remaining Scripts to Consolidate
```bash
# Root level scripts
./changelog.sh # Has: info(), warn(), err()
./update-latest.sh # Has: info(), ok(), warn(), err()
# Backup system scripts
./scripts/bash/backup-export.sh # Has: info(), ok(), warn(), err()
./scripts/bash/backup-import.sh # Has: info(), ok(), warn(), err()
# Database scripts
./scripts/bash/db-guard.sh # Has: info(), warn(), err()
./scripts/bash/db-health-check.sh # Has: info(), ok(), warn(), err()
# Module & verification scripts
./scripts/bash/verify-sql-updates.sh # Has: info(), warn(), err()
./scripts/bash/manage-modules.sh # Has: info(), ok(), warn(), err()
./scripts/bash/repair-storage-permissions.sh # Has: info(), warn(), err()
./scripts/bash/test-phase1-integration.sh # Has: info(), ok(), warn(), err()
```
### Implementation Plan
**Step 1.1**: Consolidate Root Level Scripts (changelog.sh, update-latest.sh)
- Add lib/common.sh sourcing with error handling
- Remove duplicate function definitions
- Test functionality with `--help` flags
**Step 1.2**: Consolidate Backup System Scripts
- Update backup-export.sh and backup-import.sh
- Ensure backup operations still work correctly
- Test with dry-run flags where available
**Step 1.3**: Consolidate Database Scripts
- Update db-guard.sh and db-health-check.sh
- **CRITICAL**: These run in containers - verify mount paths work
- Test with existing database connections
**Step 1.4**: Consolidate Module & Verification Scripts
- Update manage-modules.sh, verify-sql-updates.sh, repair-storage-permissions.sh
- Test module staging and SQL verification workflows
- Verify test-phase1-integration.sh still functions
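
A minimal sketch of what a consolidated script header looks like after Steps 1.1–1.4, mirroring the change this commit already makes to build.sh and cleanup.sh (the path shown is for root-level scripts; scripts under scripts/bash/ resolve `ROOT_DIR` accordingly):

```bash
#!/bin/bash
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Shared colors and info()/ok()/warn()/err() come from lib/common.sh;
# the per-script BLUE/GREEN/... definitions and duplicate functions are removed.
if ! source "$ROOT_DIR/scripts/bash/lib/common.sh" 2>/dev/null; then
  echo "❌ FATAL: Cannot load $ROOT_DIR/scripts/bash/lib/common.sh" >&2
  exit 1
fi

info "logging now provided by lib/common.sh"
```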
### Validation Tests
```bash
# Test each script category after consolidation
./changelog.sh --help
./update-latest.sh --help
./scripts/bash/backup-export.sh --dry-run
./scripts/bash/manage-modules.sh --list
```
---
## Phase 2: Docker Compose YAML Anchor Completion
**Priority: HIGH** | **Risk: MEDIUM** | **Impact: HIGH**
### Status
✅ **Completed**: Basic YAML anchors, 2 authserver services consolidated
🔄 **Remaining**: 4 worldserver services, database services, volume patterns
### Current Docker Compose Analysis
```yaml
# Services needing consolidation:
- ac-worldserver-standard # ~45 lines → can reduce to ~10
- ac-worldserver-playerbots # ~45 lines → can reduce to ~10
- ac-worldserver-modules # ~45 lines → can reduce to ~10
- ac-authserver-modules # ~30 lines → can reduce to ~8
# Database services with repeated patterns:
- ac-db-import # Repeated volume mounts
- ac-db-guard # Similar environment variables
- ac-db-init # Similar MySQL connection patterns
# Volume mount patterns repeated 15+ times:
- ${STORAGE_CONFIG_PATH}:/azerothcore/env/dist/etc
- ${STORAGE_LOGS_PATH}:/azerothcore/logs
- ${BACKUP_PATH}:/backups
```
### Implementation Plan
**Step 2.1**: Complete Worldserver Service Consolidation
- Extend x-worldserver-common anchor to cover all variants
- Consolidate ac-worldserver-standard, ac-worldserver-playerbots, ac-worldserver-modules
- Test each Docker profile: `docker compose --profile services-standard config`
**Step 2.2**: Database Services Consolidation
- Create x-database-common anchor for shared database configurations
- Create x-database-volumes anchor for repeated volume patterns
- Update ac-db-import, ac-db-guard, ac-db-init services
**Step 2.3**: Complete Authserver Consolidation
- Consolidate remaining ac-authserver-modules service
- Verify all three profiles work: standard, playerbots, modules
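
A rough sketch of the anchor/merge-key pattern these steps describe; only `x-worldserver-common` is named in the plan above, and the other keys, volume list, and profile values here are illustrative rather than the project's actual compose file:

```yaml
# Illustrative only: anchor contents are examples, not the real docker-compose.yml
x-core-volumes: &core-volumes
  - ${STORAGE_CONFIG_PATH}:/azerothcore/env/dist/etc
  - ${STORAGE_LOGS_PATH}:/azerothcore/logs
  - ${BACKUP_PATH}:/backups

x-worldserver-common: &worldserver-common
  image: ${AC_WORLDSERVER_IMAGE}
  volumes: *core-volumes

services:
  ac-worldserver-standard:
    <<: *worldserver-common          # inherit the shared definition
    profiles: ["services-standard"]  # per-variant overrides stay small
```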
### Validation Tests
```bash
# Test all profiles generate valid configurations
docker compose --profile services-standard config --quiet
docker compose --profile services-playerbots config --quiet
docker compose --profile services-modules config --quiet
# Test actual deployment (non-destructive)
docker compose --profile services-standard up --dry-run
```
---
## Phase 3: Utility Function Libraries
**Priority: MEDIUM** | **Risk: MEDIUM** | **Impact: MEDIUM**
### Status
✅ **Completed**: All three utility libraries created and tested
✅ **Completed**: Integration with backup-import.sh as proof of concept
🔄 **Remaining**: Update remaining 14+ scripts to use new libraries
### Created Libraries
**✅ scripts/bash/lib/mysql-utils.sh** - COMPLETED
- MySQL connection management: `mysql_test_connection()`, `mysql_wait_for_connection()`
- Query execution: `mysql_exec_with_retry()`, `mysql_query()`, `docker_mysql_query()`
- Database utilities: `mysql_database_exists()`, `mysql_get_table_count()`
- Backup/restore: `mysql_backup_database()`, `mysql_restore_database()`
- Configuration: `mysql_validate_configuration()`, `mysql_print_configuration()`
**✅ scripts/bash/lib/docker-utils.sh** - COMPLETED
- Container management: `docker_get_container_status()`, `docker_wait_for_container_state()`
- Execution: `docker_exec_with_retry()`, `docker_is_container_running()`
- Project management: `docker_get_project_name()`, `docker_list_project_containers()`
- Image operations: `docker_get_container_image()`, `docker_pull_image_with_retry()`
- Compose integration: `docker_compose_validate()`, `docker_compose_deploy()`
- System utilities: `docker_check_daemon()`, `docker_cleanup_system()`
**✅ scripts/bash/lib/env-utils.sh** - COMPLETED
- Environment management: `env_read_with_fallback()`, `env_read_typed()`, `env_update_value()`
- Path utilities: `path_resolve_absolute()`, `file_ensure_writable_dir()`
- File operations: `file_create_backup()`, `file_set_permissions()`
- Configuration: `config_read_template_value()`, `config_validate_env()`
- System detection: `system_detect_os()`, `system_check_requirements()`
### Integration Status
**✅ Proof of Concept**: backup-import.sh updated with fallback compatibility
- Uses new utility functions when available
- Maintains backward compatibility with graceful fallbacks
- Tested and functional
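
The fallback-compatible pattern might look roughly like the sketch below; the function names come from the library listing above, while the fallback body and variable names (`MYSQL_HOST`, `MYSQL_USER`, `MYSQL_PASS`) are placeholders, not the script's actual code:

```bash
# Prefer the shared library, but keep a legacy inline helper as fallback
if source "$ROOT_DIR/scripts/bash/lib/mysql-utils.sh" 2>/dev/null; then
  : # mysql_test_connection, mysql_exec_with_retry, ... now available
else
  warn "mysql-utils.sh unavailable; falling back to inline helper"
  # Placeholder fallback; real scripts keep their original implementation here
  mysql_test_connection() {
    mysql -h"${MYSQL_HOST:-ac-mysql}" -u"${MYSQL_USER:-root}" -p"${MYSQL_PASS:-}" \
      -e 'SELECT 1' >/dev/null 2>&1
  }
fi

mysql_test_connection || err "Cannot reach MySQL"
```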
### Remaining Implementation
**Step 3.4**: Update High-Priority Scripts
- backup-export.sh: Use mysql-utils and env-utils functions
- db-guard.sh: Use mysql-utils for database operations
- deploy-tools.sh: Use docker-utils for container management
- verify-deployment.sh: Use docker-utils for status checking
**Step 3.5**: Update Database Scripts
- db-health-check.sh: Use mysql-utils for health validation
- db-import-conditional.sh: Use mysql-utils and env-utils
- manual-backup.sh: Use mysql-utils backup functions
**Step 3.6**: Update Deployment Scripts
- migrate-stack.sh: Use docker-utils for remote operations
- stage-modules.sh: Use env-utils for path management
- rebuild-with-modules.sh: Use docker-utils for build operations
### Validation Tests - COMPLETED ✅
```bash
# Test MySQL utilities
source scripts/bash/lib/mysql-utils.sh
mysql_print_configuration # ✅ PASSED
# Test Docker utilities
source scripts/bash/lib/docker-utils.sh
docker_print_system_info # ✅ PASSED
# Test Environment utilities
source scripts/bash/lib/env-utils.sh
env_utils_validate # ✅ PASSED
# Test integrated script
./backup-import.sh --help # ✅ PASSED with new libraries
```
### Next Steps
- Continue with Step 3.4: Update backup-export.sh, db-guard.sh, deploy-tools.sh
- Implement progressive rollout with testing after each script update
- Complete remaining 11 scripts in dependency order
---
## Phase 4: Error Handling Standardization
**Priority: MEDIUM** | **Risk: LOW** | **Impact: MEDIUM**
### Analysis
**Current State**: Mixed error handling patterns across scripts
```bash
# Found patterns:
set -e # 45 scripts
set -euo pipefail # 23 scripts
set -eu # 8 scripts
(no error handling) # 12 scripts
```
### Implementation Plan
**Step 4.1**: Standardize Error Handling
- Add `set -euo pipefail` to all scripts where safe
- Add error traps for cleanup in critical scripts
- Implement consistent exit codes
**Step 4.2**: Add Script Validation Framework
- Create validation helper functions
- Add dependency checking to critical scripts
- Implement graceful degradation where possible
### Target Pattern
```bash
#!/bin/bash
set -euo pipefail
# Error handling setup
trap 'echo "❌ Error on line $LINENO" >&2' ERR
trap 'cleanup_on_exit' EXIT
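# cleanup_on_exit is defined per script; a hypothetical minimal version
# (TMP_FILE is an illustrative name, not a variable from this repo):
cleanup_on_exit() { rm -f "${TMP_FILE:-}"; }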
# Source libraries with validation
source_lib_or_exit() {
local lib_path="$1"
if ! source "$lib_path" 2>/dev/null; then
echo "❌ FATAL: Cannot load $lib_path" >&2
exit 1
fi
}
```
---
## Phase 5: Configuration Template Consolidation
**Priority: LOW** | **Risk: LOW** | **Impact: LOW**
### Analysis
**Found**: 71 instances of duplicate color definitions across scripts
**Found**: Multiple .env template patterns that could be standardized
### Implementation Plan
**Step 5.1**: Color Definition Consolidation
- Ensure all scripts use lib/common.sh colors exclusively
- Remove remaining duplicate color definitions
- Add color theme support (optional)
**Step 5.2**: Configuration Template Cleanup
- Consolidate environment variable patterns
- Create shared configuration validation
- Standardize default value patterns
---
## Implementation Priority Order
### **Week 1: High Impact, Low Risk**
- [ ] Phase 1.1-1.2: Consolidate remaining root and backup scripts
- [ ] Phase 2.1: Complete worldserver YAML anchor consolidation
- [ ] Validate: All major scripts and Docker profiles work
### **Week 2: Complete Core Consolidation**
- [ ] Phase 1.3-1.4: Consolidate database and module scripts
- [ ] Phase 2.2-2.3: Complete database service and authserver consolidation
- [ ] Validate: Full deployment pipeline works end-to-end
### **Week 3: Utility Libraries**
- [ ] Phase 3.1: Create and implement MySQL utility library
- [ ] Phase 3.2: Create and implement Docker utility library
- [ ] Validate: Scripts using new libraries function correctly
### **Week 4: Polish and Standardization**
- [ ] Phase 3.3: Complete environment utility library
- [ ] Phase 4.1-4.2: Standardize error handling
- [ ] Phase 5.1-5.2: Final cleanup of colors and configs
- [ ] Validate: Complete system testing
---
## Validation Framework
### **Incremental Testing**
Each phase must pass these tests before proceeding:
**Script Functionality Tests:**
```bash
# Master scripts
./deploy.sh --help && ./build.sh --help && ./cleanup.sh --help
# Docker compose validation
docker compose config --quiet
# Profile validation
for profile in services-standard services-playerbots services-modules; do
docker compose --profile $profile config --quiet
done
```
**Integration Tests:**
```bash
# End-to-end validation (non-destructive)
./deploy.sh --profile services-standard --dry-run --no-watch
./scripts/bash/verify-deployment.sh --profile services-standard
```
**Regression Prevention:**
- Git commit after each completed phase
- Tag successful consolidations
- Maintain rollback procedures
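
One way to satisfy the commit-and-tag steps above (the tag name is illustrative):

```bash
git add -A
git commit -m "cleanup: Phase 1 script consolidation complete"
git tag -a cleanup-phase-1 -m "Phase 1 validated"   # illustrative tag name
```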
---
## Risk Mitigation
### **Container Script Dependencies**
- **High Risk**: Scripts mounted into containers (db-guard.sh, backup-scheduler.sh)
- **Mitigation**: Test container mounting before consolidating
- **Validation**: Verify scripts work inside container environment
### **Remote Deployment Impact**
- **Medium Risk**: SSH deployment scripts (migrate-stack.sh)
- **Mitigation**: Test remote deployment on non-production host
- **Validation**: Verify remote script sourcing works correctly
### **Docker Compose Version Compatibility**
- **Medium Risk**: Advanced YAML anchors may not work on older versions
- **Mitigation**: Add version detection and warnings
- **Validation**: Test on minimum supported Docker Compose version
---
## Success Metrics
### **Quantitative Goals**
- Reduce duplicate logging functions from 14 → 0 scripts
- Reduce Docker compose file from ~1000 → ~600 lines
- Reduce color definitions from 71 → 1 centralized location
- Consolidate MySQL connection patterns from 22 → 1 library
### **Qualitative Goals**
- Single source of truth for common functionality
- Consistent user experience across all scripts
- Maintainable and extensible architecture
- Clear dependency relationships
- Robust error handling and validation
### **Completion Criteria**
- [ ] All scripts source centralized libraries exclusively
- [ ] No duplicate function definitions remain
- [ ] Docker compose uses YAML anchors for all repeated patterns
- [ ] Comprehensive test suite validates all functionality
- [ ] Documentation updated to reflect new architecture

View File

@@ -130,13 +130,6 @@ For diagnostic procedures, common issues, and backup system documentation, see *
This project builds upon:
- **[AzerothCore](https://github.com/azerothcore/azerothcore-wotlk)** - Core server application
- **[AzerothCore Module Community](https://github.com/azerothcore)** - Enhanced gameplay modules
- **[acore-docker](https://github.com/azerothcore/acore-docker)** - Inspiration for containerized deployment
- **[mod-playerbots](https://github.com/mod-playerbots/azerothcore-wotlk)** - Advanced playerbot functionality
- **All module creators** - Making amazing things every day
### Community & Support
- **[AzerothCore Discord](https://discord.gg/gkt4y2x)** - Join the community for support and discussions
- **[GitHub Issues](https://github.com/uprightbass360/AzerothCore-RealmMaster/issues)** - Report build or deployment issues here
#### Key Features
- **Fully Automated Setup** - Interactive configuration and deployment

View File

@@ -9,6 +9,13 @@ set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
ENV_PATH="$ROOT_DIR/.env"
TEMPLATE_PATH="$ROOT_DIR/.env.template"
# Source common library with proper error handling
if ! source "$ROOT_DIR/scripts/bash/lib/common.sh" 2>/dev/null; then
echo "❌ FATAL: Cannot load $ROOT_DIR/scripts/bash/lib/common.sh" >&2
echo "This library is required for build.sh to function." >&2
exit 1
fi
source "$ROOT_DIR/scripts/bash/project_name.sh"
# Default project name (read from .env or template)
@@ -17,11 +24,7 @@ ASSUME_YES=0
FORCE_REBUILD=0
SKIP_SOURCE_SETUP=0
CUSTOM_SOURCE_PATH=""
BLUE='\033[0;34m'; GREEN='\033[0;32m'; YELLOW='\033[1;33m'; RED='\033[0;31m'; NC='\033[0m'
info(){ printf '%b\n' "${BLUE} $*${NC}"; }
ok(){ printf '%b\n' "${GREEN}$*${NC}"; }
warn(){ printf '%b\n' "${YELLOW}⚠️ $*${NC}"; }
err(){ printf '%b\n' "${RED}$*${NC}"; }
# Color definitions and logging functions now provided by lib/common.sh
show_build_header(){
printf '\n%b\n' "${BLUE}🔨 AZEROTHCORE BUILD SYSTEM 🔨${NC}"
@@ -38,7 +41,6 @@ Build AzerothCore with custom modules and create deployment-ready images.
Options:
--yes, -y Auto-confirm all prompts
--force Force rebuild even if no changes detected
--force-update Force update source repository to latest commits
--source-path PATH Custom source repository path
--skip-source-setup Skip automatic source repository setup
-h, --help Show this help
@@ -54,7 +56,6 @@ Examples:
./build.sh Interactive build
./build.sh --yes Auto-confirm build
./build.sh --force Force rebuild regardless of state
./build.sh --force-update Update source to latest and build
EOF
}
@@ -62,7 +63,6 @@ while [[ $# -gt 0 ]]; do
case "$1" in
--yes|-y) ASSUME_YES=1; shift;;
--force) FORCE_REBUILD=1; shift;;
--force-update) FORCE_UPDATE=1; shift;;
--source-path) CUSTOM_SOURCE_PATH="$2"; shift 2;;
--skip-source-setup) SKIP_SOURCE_SETUP=1; shift;;
-h|--help) usage; exit 0;;
@@ -243,13 +243,6 @@ ensure_source_repo(){
src_path="${src_path//\/.\//\/}"
if [ -d "$src_path/.git" ]; then
if [ "${FORCE_UPDATE:-0}" = "1" ]; then
info "Force update requested - updating source repository to latest" >&2
if ! (cd "$ROOT_DIR" && ./scripts/bash/setup-source.sh) >&2; then
err "Failed to update source repository" >&2
exit 1
fi
fi
echo "$src_path"
return
fi
@@ -550,10 +543,6 @@ stage_modules(){
rm -f "$staging_modules_dir/.modules_state" "$staging_modules_dir/.requires_rebuild" 2>/dev/null || true
fi
# Export environment variables needed by module hooks
export STACK_SOURCE_VARIANT="$(read_env STACK_SOURCE_VARIANT "core")"
export MODULES_REBUILD_SOURCE_PATH="$(read_env MODULES_REBUILD_SOURCE_PATH "")"
if ! (cd "$local_modules_dir" && bash "$ROOT_DIR/scripts/bash/manage-modules.sh"); then
err "Module staging failed; aborting build"
return 1

View File

@@ -7,6 +7,12 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$SCRIPT_DIR"
cd "$PROJECT_ROOT"
# Source common library for standardized logging
if ! source "$SCRIPT_DIR/scripts/bash/lib/common.sh" 2>/dev/null; then
echo "❌ FATAL: Cannot load $SCRIPT_DIR/scripts/bash/lib/common.sh" >&2
exit 1
fi
# Load environment configuration (available on deployed servers)
if [ -f ".env" ]; then
set -a
@@ -20,11 +26,10 @@ OUTPUT_DIR="${CHANGELOG_OUTPUT_DIR:-./changelogs}"
DAYS_BACK="${CHANGELOG_DAYS_BACK:-7}"
FORMAT="${CHANGELOG_FORMAT:-markdown}"
# Colors for output
GREEN='\033[0;32m'; BLUE='\033[0;34m'; YELLOW='\033[1;33m'; NC='\033[0m'
log() { echo -e "${BLUE}[$(date '+%H:%M:%S')]${NC} $*" >&2; }
success() { echo -e "${GREEN}${NC} $*" >&2; }
warn() { echo -e "${YELLOW}⚠️${NC} $*" >&2; }
# Specialized logging with timestamp for changelog context
log() { info "[$(date '+%H:%M:%S')] $*"; }
success() { ok "$*"; }
# warn() function already provided by lib/common.sh
usage() {
cat <<EOF
@@ -99,36 +104,7 @@ done
# Get last build time from container metadata
get_last_build_time() {
local containers=("ac-worldserver" "ac-authserver")
local images=()
# Require COMPOSE_PROJECT_NAME to be set
if [[ -z "${COMPOSE_PROJECT_NAME:-}" ]]; then
warn "COMPOSE_PROJECT_NAME not set in environment"
return 1
fi
# Use actual image names from environment
# Detect variant to check appropriate images
if [[ "${STACK_IMAGE_MODE:-standard}" == "playerbots" ]] || [[ "${MODULE_PLAYERBOTS:-0}" == "1" ]] || [[ "${PLAYERBOT_ENABLED:-0}" == "1" ]] || [[ "${STACK_SOURCE_VARIANT:-}" == "playerbots" ]]; then
if [[ -z "${AC_WORLDSERVER_IMAGE_PLAYERBOTS:-}" ]] || [[ -z "${AC_AUTHSERVER_IMAGE_PLAYERBOTS:-}" ]]; then
warn "Playerbots mode detected but AC_WORLDSERVER_IMAGE_PLAYERBOTS or AC_AUTHSERVER_IMAGE_PLAYERBOTS not set"
return 1
fi
images=(
"${AC_WORLDSERVER_IMAGE_PLAYERBOTS}"
"${AC_AUTHSERVER_IMAGE_PLAYERBOTS}"
)
else
if [[ -z "${AC_WORLDSERVER_IMAGE:-}" ]] || [[ -z "${AC_AUTHSERVER_IMAGE:-}" ]]; then
warn "Standard mode detected but AC_WORLDSERVER_IMAGE or AC_AUTHSERVER_IMAGE not set"
return 1
fi
images=(
"${AC_WORLDSERVER_IMAGE}"
"${AC_AUTHSERVER_IMAGE}"
)
fi
local images=("azerothcore-stack:worldserver-playerbots" "azerothcore-stack:authserver-playerbots")
local latest_date=""
# Try to get build timestamp from containers and images
@@ -172,7 +148,7 @@ if [[ -n "$SINCE_DATE" ]]; then
DATE_DESC="since $SINCE_DATE"
else
# Try to use last build time as default
LAST_BUILD_DATE=$(get_last_build_time 2>/dev/null) || LAST_BUILD_DATE=""
LAST_BUILD_DATE=$(get_last_build_time)
if [[ -n "$LAST_BUILD_DATE" ]]; then
SINCE_OPTION="--since=$LAST_BUILD_DATE"
@@ -223,17 +199,11 @@ detect_source_config() {
$VERBOSE && log "Switched to playerbots variant" >&2
fi
# Repository URLs from environment (required)
local standard_repo="${ACORE_REPO_STANDARD}"
local standard_branch="${ACORE_BRANCH_STANDARD}"
local playerbots_repo="${ACORE_REPO_PLAYERBOTS}"
local playerbots_branch="${ACORE_BRANCH_PLAYERBOTS}"
if [[ -z "$standard_repo" ]] || [[ -z "$standard_branch" ]] || [[ -z "$playerbots_repo" ]] || [[ -z "$playerbots_branch" ]]; then
warn "Repository configuration missing from environment"
warn "Required: ACORE_REPO_STANDARD, ACORE_BRANCH_STANDARD, ACORE_REPO_PLAYERBOTS, ACORE_BRANCH_PLAYERBOTS"
return 1
fi
# Repository URLs from environment or defaults
local standard_repo="${ACORE_REPO_STANDARD:-https://github.com/azerothcore/azerothcore-wotlk.git}"
local standard_branch="${ACORE_BRANCH_STANDARD:-master}"
local playerbots_repo="${ACORE_REPO_PLAYERBOTS:-https://github.com/mod-playerbots/azerothcore-wotlk.git}"
local playerbots_branch="${ACORE_BRANCH_PLAYERBOTS:-Playerbot}"
if [[ "$variant" == "playerbots" ]]; then
echo "$playerbots_repo|$playerbots_branch|$LOCAL_STORAGE_ROOT/source/azerothcore-playerbots"

View File

@@ -14,6 +14,13 @@ PROJECT_DIR="${SCRIPT_DIR}"
DEFAULT_COMPOSE_FILE="${PROJECT_DIR}/docker-compose.yml"
ENV_FILE="${PROJECT_DIR}/.env"
TEMPLATE_FILE="${PROJECT_DIR}/.env.template"
# Source common library with proper error handling
if ! source "${PROJECT_DIR}/scripts/bash/lib/common.sh" 2>/dev/null; then
echo "❌ FATAL: Cannot load ${PROJECT_DIR}/scripts/bash/lib/common.sh" >&2
echo "This library is required for cleanup.sh to function." >&2
exit 1
fi
source "${PROJECT_DIR}/scripts/bash/project_name.sh"
# Default project name (read from .env or template)
@@ -21,17 +28,16 @@ DEFAULT_PROJECT_NAME="$(project_name::resolve "$ENV_FILE" "$TEMPLATE_FILE")"
source "${PROJECT_DIR}/scripts/bash/compose_overrides.sh"
declare -a COMPOSE_FILE_ARGS=()
# Colors
RED='\033[0;31m'; GREEN='\033[0;32m'; YELLOW='\033[1;33m'; BLUE='\033[0;34m'; MAGENTA='\033[0;35m'; NC='\033[0m'
# Color definitions now provided by lib/common.sh
# Legacy print_status function for cleanup.sh compatibility
print_status() {
case "$1" in
INFO) echo -e "${BLUE} ${2}${NC}";;
SUCCESS) echo -e "${GREEN}${2}${NC}";;
WARNING) echo -e "${YELLOW}⚠️ ${2}${NC}";;
ERROR) echo -e "${RED}${2}${NC}";;
DANGER) echo -e "${RED}💀 ${2}${NC}";;
HEADER) echo -e "\n${MAGENTA}=== ${2} ===${NC}";;
INFO) info "${2}";;
SUCCESS) ok "${2}";;
WARNING) warn "${2}";;
ERROR) err "${2}";;
DANGER) printf '%b\n' "${RED}💀 ${2}${NC}";;
HEADER) printf '\n%b\n' "${CYAN}=== ${2} ===${NC}";;
esac
}
@@ -146,6 +152,8 @@ sanitize_project_name(){
project_name::sanitize "$1"
}
PROJECT_IMAGE_PREFIX="$(sanitize_project_name "${COMPOSE_PROJECT_NAME:-$DEFAULT_PROJECT_NAME}")"
remove_storage_dir(){
local path="$1"
if [ -d "$path" ]; then
@@ -221,7 +229,8 @@ nuclear_cleanup() {
# Remove project images (server/tool images typical to this project)
execute_command "Remove acore images" "docker images --format '{{.Repository}}:{{.Tag}}' | grep -E '^acore/' | xargs -r docker rmi"
execute_command "Remove project-specific images" "docker images --format '{{.Repository}}:{{.Tag}}' | grep -E \"^${PROJECT_NAME}:\" | xargs -r docker rmi"
execute_command "Remove local project images" "docker images --format '{{.Repository}}:{{.Tag}}' | grep -E '^${PROJECT_IMAGE_PREFIX}:' | xargs -r docker rmi"
execute_command "Remove legacy playerbots images" "docker images --format '{{.Repository}}:{{.Tag}}' | grep -E '^uprightbass360/azerothcore-wotlk-playerbots' | xargs -r docker rmi"
execute_command "Remove tool images" "docker images --format '{{.Repository}}:{{.Tag}}' | grep -E 'phpmyadmin|uprightbass360/keira3' | xargs -r docker rmi"
# Storage cleanup (preserve backups if requested)

View File

@@ -491,7 +491,7 @@
"block_reason": "Runtime error: SQL error: MODULE_mod-black-market_creature.sql references removed 'StatsCount' column",
"order": 5000,
"config_cleanup": [],
"notes": "Disabled due to runtime error: SQL error: MODULE_mod-black-market_creature.sql references removed 'StatsCount' column \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "Disabled due to runtime error: SQL error: MODULE_mod-black-market_creature.sql references removed 'StatsCount' column",
"last_modified": "2025-06-26T14:23:47Z"
},
{
@@ -1490,7 +1490,7 @@
"description": "Module for WoW 3.3.5a (AzerothCore \u2013 Playerbots). Tested on Ubuntu.",
"type": "cpp",
"category": "database",
"notes": "Disabled due to runtime error: MODULE_mod-guild-village_001_creature_template.sql tries to insert duplicate creature ID 987400 (ERROR 1062) \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "Disabled due to runtime error: MODULE_mod-guild-village_001_creature_template.sql tries to insert duplicate creature ID 987400 (ERROR 1062)",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -1581,7 +1581,7 @@
"description": "Module for WoW 3.3.5a (AzerothCore \u2013 Playerbots). Tested on Ubuntu.",
"type": "cpp",
"category": "database",
"notes": "Disabled due to runtime error: MODULE_mod-instance-tools_Creature.sql tries to insert duplicate creature ID 987456-0 (ERROR 1062) \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "Disabled due to runtime error: MODULE_mod-instance-tools_Creature.sql tries to insert duplicate creature ID 987456-0 (ERROR 1062)",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -1809,7 +1809,7 @@
"description": "Hardcore trial mod for groups of 1-5",
"type": "cpp",
"category": "progression",
"notes": "DISABLED: no member named 'isEmpty' in 'MapRefMgr' \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: no member named 'isEmpty' in 'MapRefMgr'",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -2005,7 +2005,7 @@
"description": "AoE loot module for AzerothCore built from the ground up without loot loss. (No Loot Merging).",
"type": "cpp",
"category": "quality-of-life",
"notes": "DISABLED: Naming conflict with Item class \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: Naming conflict with Item class",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -2142,7 +2142,7 @@
"description": "Adds Mist of Pandaria's Brawler's Guild to AzerothCore.",
"type": "cpp",
"category": "minigame",
"notes": "DISABLED: no matching member function for call to 'DelayEvents' \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: no matching member function for call to 'DelayEvents'",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -2188,7 +2188,7 @@
"description": "Mercado negro",
"type": "cpp",
"category": "database",
"notes": "DISABLED: SQL schema mismatch - StatsCount column doesn't exist \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: SQL schema mismatch - StatsCount column doesn't exist",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -2835,7 +2835,7 @@
"description": "Prestige level system mod for Azerothcore",
"type": "cpp",
"category": "progression",
"notes": "DISABLED: 'OnLogin' marked 'override' but does not override \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: 'OnLogin' marked 'override' but does not override",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -2851,7 +2851,7 @@
"description": "Adds extra difficulty modes, heavily inspired by mod-challenge-modes.",
"type": "cpp",
"category": "progression",
"notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: only virtual member functions can be marked 'override'",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -2897,7 +2897,7 @@
"description": "This module aims to make mounts, companions and heirlooms shared across all characters of an account",
"type": "cpp",
"category": "account-wide",
"notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: only virtual member functions can be marked 'override'",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3141,7 +3141,7 @@
"description": "An Azeroth Core module to allow the display of the Breaking News section on the character select screen.",
"type": "cpp",
"category": "social",
"notes": "DISABLED: no member named 'StringFormatFmt' in namespace 'Acore' \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: no member named 'StringFormatFmt' in namespace 'Acore'",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3172,7 +3172,7 @@
"description": "This WoW-Azerothcore-Mod allows to change spawntimes based on a userdefined or dynamically calculated playerbased factor",
"type": "cpp",
"category": "progression",
"notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: only virtual member functions can be marked 'override'",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3188,7 +3188,7 @@
"description": "This mod allows noclip with a command noclip. on / off",
"type": "cpp",
"category": "admin",
"notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: only virtual member functions can be marked 'override'",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3249,7 +3249,7 @@
"description": "AzerothCore port (as a Module) of Rochet2's Objscale",
"type": "cpp",
"category": "progression",
"notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: only virtual member functions can be marked 'override'",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3311,7 +3311,7 @@
"description": "An AzerothCore module that recycles unwanted items to the auction house.",
"type": "cpp",
"category": "economy",
"notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: only virtual member functions can be marked 'override'",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3342,7 +3342,7 @@
"description": "AzerothCore module that adds a random attribute book.",
"type": "cpp",
"category": "admin",
"notes": "DISABLED: 'OnLogin' marked 'override' but does not override \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: 'OnLogin' marked 'override' but does not override",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3358,7 +3358,7 @@
"description": "AzerothCore module that allows prestige at max level.",
"type": "cpp",
"category": "progression",
"notes": "DISABLED: use of undeclared identifier 'sSpellMgr' \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: use of undeclared identifier 'sSpellMgr'",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3405,7 +3405,7 @@
"description": "An Azeroth Core module that adds alternative XP gains.",
"type": "cpp",
"category": "progression",
"notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: only virtual member functions can be marked 'override'",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3437,7 +3437,7 @@
"description": "Ensures all party members have matching faction before queueing into battleground.",
"type": "cpp",
"category": "pvp",
"notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: only virtual member functions can be marked 'override'",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3484,7 +3484,7 @@
"description": "AzerothCore custom module which allows filtering traded items",
"type": "cpp",
"category": "economy",
"notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: only virtual member functions can be marked 'override'",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3500,7 +3500,7 @@
"description": "This module allows you to search for quests by ID, which gives you greater precision in your search.",
"type": "cpp",
"category": "scripting",
"notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: only virtual member functions can be marked 'override'",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3516,7 +3516,7 @@
"description": "PvPScript for Azerothcore",
"type": "cpp",
"category": "pvp",
"notes": "DISABLED: no member named 'SendNotification' in 'WorldSession' \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: no member named 'SendNotification' in 'WorldSession'",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3532,7 +3532,7 @@
"description": "Broadcasts items with the ITEM_FLAG_REPORT_TO_GUILD_CHAT flag to guild chat.",
"type": "cpp",
"category": "scripting",
"notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: only virtual member functions can be marked 'override'",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3548,7 +3548,7 @@
"description": "Fixes FFA for safe zones.",
"type": "cpp",
"category": "content",
"notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: only virtual member functions can be marked 'override'",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3564,7 +3564,7 @@
"description": "AzerothCore Interconnect",
"type": "cpp",
"category": "tooling",
"notes": "DISABLED: no member named 'StringFormatFmt' in namespace 'Acore' \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: no member named 'StringFormatFmt' in namespace 'Acore'",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3708,7 +3708,7 @@
"description": "Module for Azerothcore to teleport players to with a command",
"type": "cpp",
"category": "quality-of-life",
"notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: only virtual member functions can be marked 'override'",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3739,7 +3739,7 @@
"description": "Adds a re-roll item for StatBooster bonus stat.",
"type": "cpp",
"category": "rewards",
"notes": "DISABLED: 'StatBoostMgr.h' file not found \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: 'StatBoostMgr.h' file not found",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3835,7 +3835,7 @@
"description": "All-In-One Solution module to easily enable features for new players",
"type": "cpp",
"category": "progression",
"notes": "DISABLED: no member named 'getLevel'; did you mean 'GetLevel'? \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: no member named 'getLevel'; did you mean 'GetLevel'?",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3851,7 +3851,7 @@
"description": "Custom scipts and modules for Azerothcore",
"type": "cpp",
"category": "scripting",
"notes": "DISABLED: no member named 'PQuery' / 'outString' in Log \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: no member named 'PQuery' / 'outString' in Log",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3899,7 +3899,7 @@
"description": "",
"type": "cpp",
"category": "content",
"notes": "DISABLED: Missing config identifier \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: Missing config identifier",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3915,7 +3915,7 @@
"description": "Pushes server and player events into an influxdb instance.",
"type": "cpp",
"category": "scripting",
"notes": "DISABLED: Build fails - requires CURL library (missing: CURL_LIBRARY CURL_INCLUDE_DIR) \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: Build fails - requires CURL library (missing: CURL_LIBRARY CURL_INCLUDE_DIR)",
"status": "blocked",
"block_reason": "CMake Error: Could NOT find CURL",
"order": 5000,
@@ -3931,7 +3931,7 @@
"description": "Spell Regulator module for AzerothCore",
"type": "cpp",
"category": "scripting",
"notes": "DISABLED: redefinition of 'AddSpellRegulatorScripts' \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: redefinition of 'AddSpellRegulatorScripts'",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3947,7 +3947,7 @@
"description": "Module for Azerothcore",
"type": "cpp",
"category": "progression",
"notes": "DISABLED: 'ChatHandler' is an incomplete type \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: 'ChatHandler' is an incomplete type",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -3979,7 +3979,7 @@
"description": "WIP reward system for AC ingame shop",
"type": "cpp",
"category": "economy",
"notes": "DISABLED: API incompatibility - ConfigMgr missing GetIntDefault method \nDiscovered via GitHub topic 'azerothcore-module'",
"notes": "DISABLED: API incompatibility - ConfigMgr missing GetIntDefault method",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -4149,7 +4149,7 @@
"description": "ACore CMS based on Wordpress",
"type": "tool",
"category": "tooling",
"notes": "Disabled due to runtime error: Table 'acore_auth.acore_cms_subscriptions' doesn't exist - causes server abort \nDiscovered via GitHub topic 'azerothcore-tools'",
"notes": "Disabled due to runtime error: Table 'acore_auth.acore_cms_subscriptions' doesn't exist - causes server abort",
"status": "blocked",
"block_reason": "Runtime error: Missing database table",
"order": 5000,
@@ -5065,7 +5065,7 @@
"description": "",
"type": "lua",
"category": "scripting",
"notes": "DISABLED: Git clone fails with 'unknown switch E' error - likely due to hyphen in repo name \nDiscovered via GitHub topic 'azerothcore-lua'",
"notes": "DISABLED: Git clone fails with 'unknown switch E' error - likely due to hyphen in repo name",
"status": "blocked",
"block_reason": "Git clone error: unknown switch 'E'",
"order": 5000,
@@ -5233,7 +5233,7 @@
"description": "This module adds thematically appropriate green and blue loot drops to ALL 450 Classic rares in Kalimdor and the Eastern Kingdoms.",
"type": "sql",
"category": "database",
"notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-sql'",
"notes": "DISABLED: only virtual member functions can be marked 'override'",
"status": "blocked",
"order": 5000,
"requires": [],
@@ -5391,104 +5391,6 @@
"post_install_hooks": [],
"config_cleanup": [],
"last_modified": "2020-12-16T18:26:39Z"
},
{
"key": "MODULE_MOD_BOTS_LOGIN_FIX",
"name": "mod-bots-login-fix",
"repo": "https://github.com/BeardBear33/mod-bots-login-fix.git",
"description": "Oprava duplicitn\u00edho p\u0159ipojen\u00ed na AltBota pro modul Playerbots. // Fix for duplicate connections to an AltBot for the Playerbots module.",
"type": "cpp",
"category": "uncategorized",
"notes": "Discovered via GitHub topic 'azerothcore-module'",
"status": "active",
"order": 5000,
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
},
{
"key": "MODULE_MOD_MATERIAL_BANK",
"name": "mod-material-bank",
"repo": "https://github.com/BeardBear33/mod-material-bank.git",
"description": "Module for WoW 3.3.5a (AzerothCore \u2013 Playerbots). Tested on Ubuntu.",
"type": "cpp",
"category": "uncategorized",
"notes": "Discovered via GitHub topic 'azerothcore-module'",
"status": "active",
"order": 5000,
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
},
{
"key": "MODULE_MOD_PROGRESSION_BLIZZLIKE",
"name": "mod-progression-blizzlike",
"repo": "https://github.com/kambire/mod-progression-blizzlike.git",
"description": "Modular progression system for AzerothCore built around brackets (Vanilla/TBC/WotLK + Arena seasons). Each enabled bracket can load its own C++ scripts and SQL updates.",
"type": "cpp",
"category": "uncategorized",
"notes": "Discovered via GitHub topic 'azerothcore-module'",
"status": "active",
"order": 5000,
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
},
{
"key": "MODULE_MOD_PYTHON_ENGINE",
"name": "mod-python-engine",
"repo": "https://github.com/privatecore/mod-python-engine.git",
"description": "A Python Scripting Engine module for AzerothCore",
"type": "cpp",
"category": "uncategorized",
"notes": "Discovered via GitHub topic 'azerothcore-module'",
"status": "active",
"order": 5000,
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
},
{
"key": "MODULE_WRATH_OF_THE_VANILLA_V2",
"name": "wrath-of-the-vanilla-v2",
"repo": "https://github.com/Hextv/wrath-of-the-vanilla-v2.git",
"description": "Project that focuses on turning a server running AzerothCore into one limited to the original (vanilla) content.",
"type": "sql",
"category": "database",
"notes": "Discovered via GitHub topic 'azerothcore-module'",
"status": "active",
"order": 5000,
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
},
{
"key": "MODULE_AZEROTHMCP",
"name": "azerothMCP",
"repo": "https://github.com/blinkysc/azerothMCP.git",
"description": "MCP server for AzerothCore",
"type": "tool",
"category": "tooling",
"notes": "Discovered via GitHub topic 'azerothcore-tools' Not directly related to runtime functionality of AzerothCore",
"status": "blocked",
"order": 5000,
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
},
{
"key": "MODULE_AZEROTHCORE_REALMMASTER",
"name": "AzerothCore-RealmMaster",
"repo": "https://github.com/uprightbass360/AzerothCore-RealmMaster.git",
"description": "Automated AzerothCore docker stack with tooling. ",
"type": "tool",
"category": "tooling",
"notes": "Discovered via GitHub topic 'azerothcore-tools'",
"status": "blocked",
"order": 5000,
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
}
]
}

View File

@@ -2,7 +2,7 @@
"modules": [
],
"label": "\ud83d\udd30 AzerothCore Main - Mod Free",
"label": "\u2b50 AzerothCore Main - Mod Free",
"description": "Pure AzerothCore with no optional modules enabled",
"order": 3
}

View File

@@ -12,6 +12,13 @@ ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
DEFAULT_COMPOSE_FILE="$ROOT_DIR/docker-compose.yml"
ENV_PATH="$ROOT_DIR/.env"
TEMPLATE_PATH="$ROOT_DIR/.env.template"
# Source common library with proper error handling
if ! source "$ROOT_DIR/scripts/bash/lib/common.sh" 2>/dev/null; then
echo "❌ FATAL: Cannot load $ROOT_DIR/scripts/bash/lib/common.sh" >&2
echo "This library is required for deploy.sh to function." >&2
exit 1
fi
source "$ROOT_DIR/scripts/bash/project_name.sh"
# Default project name (read from .env or template)
@@ -46,11 +53,7 @@ MODULE_STATE_INITIALIZED=0
declare -a MODULES_COMPILE_LIST=()
declare -a COMPOSE_FILE_ARGS=()
BLUE='\033[0;34m'; GREEN='\033[0;32m'; YELLOW='\033[1;33m'; RED='\033[0;31m'; NC='\033[0m'
info(){ printf '%b\n' "${BLUE} $*${NC}"; }
ok(){ printf '%b\n' "${GREEN}$*${NC}"; }
warn(){ printf '%b\n' "${YELLOW}⚠️ $*${NC}"; }
err(){ printf '%b\n' "${RED}$*${NC}"; }
# Color definitions and logging functions now provided by lib/common.sh
show_deployment_header(){
printf '\n%b\n' "${BLUE}⚔️ AZEROTHCORE REALM DEPLOYMENT ⚔️${NC}"

View File

@@ -1,11 +1,110 @@
name: ${COMPOSE_PROJECT_NAME}
# =============================================================================
# YAML ANCHORS - Shared Configuration Templates
# =============================================================================
x-logging: &logging-default
driver: json-file
options:
max-size: "10m"
max-file: "3"
# Common database connection environment variables
x-database-config: &database-config
CONTAINER_MYSQL: ${CONTAINER_MYSQL}
MYSQL_PORT: ${MYSQL_PORT}
MYSQL_USER: ${MYSQL_USER}
MYSQL_ROOT_PASSWORD: ${MYSQL_ROOT_PASSWORD}
DB_AUTH_NAME: ${DB_AUTH_NAME}
DB_WORLD_NAME: ${DB_WORLD_NAME}
DB_CHARACTERS_NAME: ${DB_CHARACTERS_NAME}
DB_RECONNECT_SECONDS: ${DB_RECONNECT_SECONDS}
DB_RECONNECT_ATTEMPTS: ${DB_RECONNECT_ATTEMPTS}
# AzerothCore database connection strings
x-azerothcore-databases: &azerothcore-databases
AC_LOGIN_DATABASE_INFO: "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_AUTH_NAME}"
AC_WORLD_DATABASE_INFO: "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_WORLD_NAME}"
AC_CHARACTER_DATABASE_INFO: "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_CHARACTERS_NAME}"
# Common storage volume mounts
x-storage-volumes: &storage-volumes
- ${STORAGE_CONFIG_PATH:-${STORAGE_PATH}/config}:/azerothcore/env/dist/etc
- ${STORAGE_LOGS_PATH:-${STORAGE_PATH}/logs}:/azerothcore/logs
- ${STORAGE_LOGS_PATH:-${STORAGE_PATH}/logs}:/azerothcore/env/dist/logs
# Authserver common configuration
x-authserver-common: &authserver-common
user: "${CONTAINER_USER}"
environment:
<<: *azerothcore-databases
AC_UPDATES_ENABLE_DATABASES: "0"
AC_BIND_IP: "0.0.0.0"
AC_LOG_LEVEL: "1"
AC_LOGGER_ROOT_CONFIG: "1,Console"
AC_LOGGER_SERVER_CONFIG: "1,Console"
AC_APPENDER_CONSOLE_CONFIG: "1,2,0"
volumes: *storage-volumes
ports:
- "${AUTH_EXTERNAL_PORT}:${AUTH_PORT}"
restart: unless-stopped
logging: *logging-default
networks:
- azerothcore
cap_add: ["SYS_NICE"]
healthcheck: &auth-healthcheck
test: ["CMD", "sh", "-c", "ps aux | grep '[a]uthserver' | grep -v grep || exit 1"]
interval: ${AUTH_HEALTHCHECK_INTERVAL}
timeout: ${AUTH_HEALTHCHECK_TIMEOUT}
retries: ${AUTH_HEALTHCHECK_RETRIES}
start_period: ${AUTH_HEALTHCHECK_START_PERIOD}
# Worldserver common configuration
x-worldserver-common: &worldserver-common
user: "${CONTAINER_USER}"
stdin_open: true
tty: true
environment:
<<: *azerothcore-databases
AC_UPDATES_ENABLE_DATABASES: "7"
AC_BIND_IP: "0.0.0.0"
AC_DATA_DIR: "/azerothcore/data"
AC_SOAP_PORT: "${SOAP_PORT}"
AC_PROCESS_PRIORITY: "0"
AC_ELUNA_ENABLED: "${AC_ELUNA_ENABLED}"
AC_ELUNA_TRACE_BACK: "${AC_ELUNA_TRACE_BACK}"
AC_ELUNA_AUTO_RELOAD: "${AC_ELUNA_AUTO_RELOAD}"
AC_ELUNA_BYTECODE_CACHE: "${AC_ELUNA_BYTECODE_CACHE}"
AC_ELUNA_SCRIPT_PATH: "${AC_ELUNA_SCRIPT_PATH}"
AC_ELUNA_REQUIRE_PATHS: "${AC_ELUNA_REQUIRE_PATHS}"
AC_ELUNA_REQUIRE_CPATHS: "${AC_ELUNA_REQUIRE_CPATHS}"
AC_ELUNA_AUTO_RELOAD_INTERVAL: "${AC_ELUNA_AUTO_RELOAD_INTERVAL}"
PLAYERBOT_ENABLED: "${PLAYERBOT_ENABLED}"
PLAYERBOT_MAX_BOTS: "${PLAYERBOT_MAX_BOTS}"
AC_LOG_LEVEL: "2"
ports:
- "${WORLD_EXTERNAL_PORT}:${WORLD_PORT}"
- "${SOAP_EXTERNAL_PORT}:${SOAP_PORT}"
volumes:
- ${CLIENT_DATA_PATH:-${STORAGE_CLIENT_DATA_PATH:-${STORAGE_PATH}/client-data}}:/azerothcore/data
- ${STORAGE_CONFIG_PATH:-${STORAGE_PATH}/config}:/azerothcore/env/dist/etc
- ${STORAGE_LOGS_PATH:-${STORAGE_PATH}/logs}:/azerothcore/logs
- ${STORAGE_LOGS_PATH:-${STORAGE_PATH}/logs}:/azerothcore/env/dist/logs
- ${STORAGE_MODULES_PATH:-${STORAGE_PATH}/modules}:/azerothcore/modules
- ${STORAGE_LUA_SCRIPTS_PATH:-${STORAGE_PATH}/lua_scripts}:/azerothcore/lua_scripts
restart: unless-stopped
logging: *logging-default
networks:
- azerothcore
cap_add: ["SYS_NICE"]
healthcheck: &world-healthcheck
test: ["CMD", "sh", "-c", "ps aux | grep '[w]orldserver' | grep -v grep || exit 1"]
interval: ${WORLD_HEALTHCHECK_INTERVAL}
timeout: ${WORLD_HEALTHCHECK_TIMEOUT}
retries: ${WORLD_HEALTHCHECK_RETRIES}
start_period: ${WORLD_HEALTHCHECK_START_PERIOD}
services:
# =====================
# Database Layer (db)
@@ -25,7 +124,6 @@ services:
MYSQL_MAX_CONNECTIONS: ${MYSQL_MAX_CONNECTIONS}
MYSQL_INNODB_BUFFER_POOL_SIZE: ${MYSQL_INNODB_BUFFER_POOL_SIZE}
MYSQL_INNODB_LOG_FILE_SIZE: ${MYSQL_INNODB_LOG_FILE_SIZE}
MYSQL_BINLOG_EXPIRE_LOGS_SECONDS: 86400
TZ: "${TZ}"
entrypoint:
- /usr/local/bin/mysql-entrypoint.sh
@@ -47,9 +145,6 @@ services:
- --innodb-buffer-pool-size=${MYSQL_INNODB_BUFFER_POOL_SIZE}
- --innodb-log-file-size=${MYSQL_INNODB_LOG_FILE_SIZE}
- --innodb-redo-log-capacity=${MYSQL_INNODB_REDO_LOG_CAPACITY}
- --expire_logs_days=0
- --binlog_expire_logs_seconds=86400
- --binlog_expire_logs_auto_purge=ON
restart: unless-stopped
logging: *logging-default
healthcheck:
@@ -519,10 +614,10 @@ services:
# Services - Standard (services-standard)
# =====================
ac-authserver-standard:
<<: *authserver-common
profiles: ["services-standard"]
image: ${AC_AUTHSERVER_IMAGE}
container_name: ac-authserver
user: "${CONTAINER_USER}"
depends_on:
ac-mysql:
condition: service_healthy
@@ -530,94 +625,26 @@ services:
condition: service_completed_successfully
ac-db-init:
condition: service_completed_successfully
environment:
AC_LOGIN_DATABASE_INFO: "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_AUTH_NAME}"
AC_UPDATES_ENABLE_DATABASES: "0"
AC_BIND_IP: "0.0.0.0"
AC_LOG_LEVEL: "1"
AC_LOGGER_ROOT_CONFIG: "1,Console"
AC_LOGGER_SERVER_CONFIG: "1,Console"
AC_APPENDER_CONSOLE_CONFIG: "1,2,0"
ports:
- "${AUTH_EXTERNAL_PORT}:${AUTH_PORT}"
restart: unless-stopped
logging: *logging-default
networks:
- azerothcore
volumes:
- ${STORAGE_CONFIG_PATH:-${STORAGE_PATH}/config}:/azerothcore/env/dist/etc
- ${STORAGE_LOGS_PATH:-${STORAGE_PATH}/logs}:/azerothcore/logs
- ${STORAGE_LOGS_PATH:-${STORAGE_PATH}/logs}:/azerothcore/env/dist/logs
cap_add: ["SYS_NICE"]
healthcheck:
test: ["CMD", "sh", "-c", "ps aux | grep '[a]uthserver' | grep -v grep || exit 1"]
interval: ${AUTH_HEALTHCHECK_INTERVAL}
timeout: ${AUTH_HEALTHCHECK_TIMEOUT}
retries: ${AUTH_HEALTHCHECK_RETRIES}
start_period: ${AUTH_HEALTHCHECK_START_PERIOD}
ac-worldserver-standard:
<<: *worldserver-common
profiles: ["services-standard"]
image: ${AC_WORLDSERVER_IMAGE}
container_name: ac-worldserver
user: "${CONTAINER_USER}"
stdin_open: true
tty: true
depends_on:
ac-authserver-standard:
condition: service_healthy
ac-client-data-standard:
condition: service_completed_successfully
environment:
AC_LOGIN_DATABASE_INFO: "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_AUTH_NAME}"
AC_WORLD_DATABASE_INFO: "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_WORLD_NAME}"
AC_CHARACTER_DATABASE_INFO: "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_CHARACTERS_NAME}"
AC_UPDATES_ENABLE_DATABASES: "7"
AC_BIND_IP: "0.0.0.0"
AC_DATA_DIR: "/azerothcore/data"
AC_SOAP_PORT: "${SOAP_PORT}"
AC_PROCESS_PRIORITY: "0"
AC_ELUNA_ENABLED: "${AC_ELUNA_ENABLED}"
AC_ELUNA_TRACE_BACK: "${AC_ELUNA_TRACE_BACK}"
AC_ELUNA_AUTO_RELOAD: "${AC_ELUNA_AUTO_RELOAD}"
AC_ELUNA_BYTECODE_CACHE: "${AC_ELUNA_BYTECODE_CACHE}"
AC_ELUNA_SCRIPT_PATH: "${AC_ELUNA_SCRIPT_PATH}"
AC_ELUNA_REQUIRE_PATHS: "${AC_ELUNA_REQUIRE_PATHS}"
AC_ELUNA_REQUIRE_CPATHS: "${AC_ELUNA_REQUIRE_CPATHS}"
AC_ELUNA_AUTO_RELOAD_INTERVAL: "${AC_ELUNA_AUTO_RELOAD_INTERVAL}"
PLAYERBOT_ENABLED: "${PLAYERBOT_ENABLED}"
PLAYERBOT_MAX_BOTS: "${PLAYERBOT_MAX_BOTS}"
AC_LOG_LEVEL: "2"
ports:
- "${WORLD_EXTERNAL_PORT}:${WORLD_PORT}"
- "${SOAP_EXTERNAL_PORT}:${SOAP_PORT}"
volumes:
- ${CLIENT_DATA_PATH:-${STORAGE_CLIENT_DATA_PATH:-${STORAGE_PATH}/client-data}}:/azerothcore/data
- ${STORAGE_CONFIG_PATH:-${STORAGE_PATH}/config}:/azerothcore/env/dist/etc
- ${STORAGE_LOGS_PATH:-${STORAGE_PATH}/logs}:/azerothcore/logs
- ${STORAGE_LOGS_PATH:-${STORAGE_PATH}/logs}:/azerothcore/env/dist/logs
- ${STORAGE_MODULES_PATH:-${STORAGE_PATH}/modules}:/azerothcore/modules
- ${STORAGE_LUA_SCRIPTS_PATH:-${STORAGE_PATH}/lua_scripts}:/azerothcore/lua_scripts
restart: unless-stopped
logging: *logging-default
networks:
- azerothcore
cap_add: ["SYS_NICE"]
healthcheck:
test: ["CMD", "sh", "-c", "ps aux | grep '[w]orldserver' | grep -v grep || exit 1"]
interval: ${WORLD_HEALTHCHECK_INTERVAL}
timeout: ${WORLD_HEALTHCHECK_TIMEOUT}
retries: ${WORLD_HEALTHCHECK_RETRIES}
start_period: ${WORLD_HEALTHCHECK_START_PERIOD}
# =====================
# Services - Playerbots (services-playerbots)
# =====================
ac-authserver-playerbots:
<<: *authserver-common
profiles: ["services-playerbots"]
image: ${AC_AUTHSERVER_IMAGE_PLAYERBOTS}
container_name: ac-authserver
user: "${CONTAINER_USER}"
depends_on:
ac-mysql:
condition: service_healthy
@@ -626,7 +653,7 @@ services:
ac-db-init:
condition: service_completed_successfully
environment:
AC_LOGIN_DATABASE_INFO: "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_AUTH_NAME}"
<<: *azerothcore-databases
AC_UPDATES_ENABLE_DATABASES: "0"
AC_BIND_IP: "0.0.0.0"
TZ: "${TZ}"
@@ -634,21 +661,6 @@ services:
AC_LOGGER_ROOT_CONFIG: "1,Console"
AC_LOGGER_SERVER_CONFIG: "1,Console"
AC_APPENDER_CONSOLE_CONFIG: "1,2,0"
ports:
- "${AUTH_EXTERNAL_PORT}:${AUTH_PORT}"
restart: unless-stopped
logging: *logging-default
networks:
- azerothcore
volumes:
- ${STORAGE_CONFIG_PATH:-${STORAGE_PATH}/config}:/azerothcore/env/dist/etc
cap_add: ["SYS_NICE"]
healthcheck:
test: ["CMD", "sh", "-c", "ps aux | grep '[a]uthserver' | grep -v grep || exit 1"]
interval: ${AUTH_HEALTHCHECK_INTERVAL}
timeout: ${AUTH_HEALTHCHECK_TIMEOUT}
retries: ${AUTH_HEALTHCHECK_RETRIES}
start_period: ${AUTH_HEALTHCHECK_START_PERIOD}
ac-authserver-modules:
profiles: ["services-modules"]
@@ -687,12 +699,10 @@ services:
start_period: ${AUTH_HEALTHCHECK_START_PERIOD}
ac-worldserver-playerbots:
<<: *worldserver-common
profiles: ["services-playerbots"]
image: ${AC_WORLDSERVER_IMAGE_PLAYERBOTS}
container_name: ac-worldserver
user: "${CONTAINER_USER}"
stdin_open: true
tty: true
depends_on:
ac-authserver-playerbots:
condition: service_healthy
@@ -701,9 +711,7 @@ services:
ac-db-guard:
condition: service_healthy
environment:
AC_LOGIN_DATABASE_INFO: "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_AUTH_NAME}"
AC_WORLD_DATABASE_INFO: "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_WORLD_NAME}"
AC_CHARACTER_DATABASE_INFO: "${CONTAINER_MYSQL};${MYSQL_PORT};${MYSQL_USER};${MYSQL_ROOT_PASSWORD};${DB_CHARACTERS_NAME}"
<<: *azerothcore-databases
AC_UPDATES_ENABLE_DATABASES: "7"
AC_BIND_IP: "0.0.0.0"
AC_DATA_DIR: "/azerothcore/data"
@@ -721,27 +729,6 @@ services:
PLAYERBOT_ENABLED: "${PLAYERBOT_ENABLED}"
PLAYERBOT_MAX_BOTS: "${PLAYERBOT_MAX_BOTS}"
AC_LOG_LEVEL: "2"
ports:
- "${WORLD_EXTERNAL_PORT}:${WORLD_PORT}"
- "${SOAP_EXTERNAL_PORT}:${SOAP_PORT}"
volumes:
- ${CLIENT_DATA_PATH:-${STORAGE_CLIENT_DATA_PATH:-${STORAGE_PATH}/client-data}}:/azerothcore/data
- ${STORAGE_CONFIG_PATH:-${STORAGE_PATH}/config}:/azerothcore/env/dist/etc
- ${STORAGE_LOGS_PATH:-${STORAGE_PATH}/logs}:/azerothcore/logs
- ${STORAGE_LOGS_PATH:-${STORAGE_PATH}/logs}:/azerothcore/env/dist/logs
- ${STORAGE_MODULES_PATH:-${STORAGE_PATH}/modules}:/azerothcore/modules
- ${STORAGE_LUA_SCRIPTS_PATH:-${STORAGE_PATH}/lua_scripts}:/azerothcore/lua_scripts
restart: unless-stopped
logging: *logging-default
networks:
- azerothcore
cap_add: ["SYS_NICE"]
healthcheck:
test: ["CMD", "sh", "-c", "ps aux | grep '[w]orldserver' | grep -v grep || exit 1"]
interval: ${WORLD_HEALTHCHECK_INTERVAL}
timeout: ${WORLD_HEALTHCHECK_TIMEOUT}
retries: ${WORLD_HEALTHCHECK_RETRIES}
start_period: ${WORLD_HEALTHCHECK_START_PERIOD}
ac-worldserver-modules:
profiles: ["services-modules"]

View File

@@ -142,7 +142,7 @@ MODULES_ENABLED="mod-playerbots mod-aoe-loot ..."
**What Gets Built:**
- AzerothCore with playerbots branch
- 93 modules compiled and integrated in this run (current manifest: 348 total / 221 supported)
- Custom Docker images: `${COMPOSE_PROJECT_NAME}:worldserver-modules-latest` etc.
- Custom Docker images: `acore-compose:worldserver-modules-latest` etc.
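After a module build completes, a quick check that the tagged images actually exist can save a failed deploy later. A minimal sketch (repository name taken from the tag above; adjust it if your build uses a different project name):

```bash
# List locally built module images and their sizes (repository name is an example)
docker images acore-compose --format '{{.Repository}}:{{.Tag}}  {{.Size}}'
```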
### Deployment Status: READY TO DEPLOY 🚀

View File

@@ -140,147 +140,6 @@ Restores user accounts and characters from backup while preserving world data.
- `acore_characters.sql[.gz]` - Character data (required)
- `acore_world.sql[.gz]` - World data (optional)
#### `scripts/bash/pdump-import.sh` - Character Import
Imports individual character dump files into the database.
```bash
# Import character from pdump file
./scripts/bash/pdump-import.sh --file character.pdump --account testuser --password azerothcore123
# Import with character rename
./scripts/bash/pdump-import.sh --file oldchar.pdump --account newuser --name "NewName" --password azerothcore123
# Validate pdump without importing (dry run)
./scripts/bash/pdump-import.sh --file character.pdump --account testuser --password azerothcore123 --dry-run
```
**Features:**
- Automatic GUID assignment or manual override with `--guid`
- Character renaming during import with `--name`
- Account validation and character name uniqueness checks
- Automatic database backup before import
- Safe server restart handling
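The `--guid` and `--name` options listed above can be combined in a single run; a sketch using placeholder values (flags as documented for `pdump-import.sh`):

```bash
# Rename the character and force a specific GUID during import (values are placeholders)
./scripts/bash/pdump-import.sh \
  --file character.pdump \
  --account testuser \
  --name "NewName" \
  --guid 5000 \
  --password azerothcore123
```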
#### `scripts/bash/import-pdumps.sh` - Batch Character Import
Processes multiple character dump files from the `import/pdumps/` directory.
```bash
# Import all pdumps with environment settings
./scripts/bash/import-pdumps.sh --password azerothcore123 --account defaultuser
# Non-interactive batch import
./scripts/bash/import-pdumps.sh --password azerothcore123 --non-interactive
```
**Directory Structure:**
```
import/pdumps/
├── character1.pdump # Character dump files
├── character2.sql # SQL dump files also supported
├── configs/ # Optional per-character configuration
│ ├── character1.conf # account=user1, name=NewName
│ └── character2.conf # account=user2, guid=5000
└── processed/ # Successfully imported files moved here
```
**Configuration Format (`.conf`):**
```ini
account=target_account_name_or_id
name=new_character_name # Optional: rename character
guid=force_specific_guid # Optional: force GUID
```
### Security Management Scripts
#### `scripts/bash/bulk-2fa-setup.sh` - Bulk 2FA Setup
Configures TOTP 2FA for multiple AzerothCore accounts using the official SOAP API.
```bash
# Setup 2FA for all accounts without it
./scripts/bash/bulk-2fa-setup.sh --all
# Setup for specific accounts
./scripts/bash/bulk-2fa-setup.sh --account user1 --account user2
# Force regenerate with custom issuer
./scripts/bash/bulk-2fa-setup.sh --all --force --issuer "MyServer"
# Preview what would be done
./scripts/bash/bulk-2fa-setup.sh --all --dry-run
# Use custom SOAP credentials
./scripts/bash/bulk-2fa-setup.sh --all --soap-user admin --soap-pass adminpass
# Show help / options
./scripts/bash/bulk-2fa-setup.sh --help
```
**Features:**
- **Official AzerothCore API Integration**: Uses SOAP commands instead of direct database manipulation
- Generates AzerothCore-compatible 16-character Base32 TOTP secrets (longer secrets are rejected by SOAP)
- Automatic account discovery or specific targeting
- QR code generation for authenticator apps
- Force regeneration of existing 2FA secrets
- Comprehensive output with setup instructions
- Safe dry-run mode for testing
- SOAP connectivity validation
- Proper error handling and validation
**Requirements:**
- AzerothCore worldserver with SOAP enabled (SOAP.Enabled = 1)
- SOAP port exposed on 7778 (SOAP.Port = 7878, mapped to external 7778)
- Remote Access enabled (Ra.Enable = 1) in worldserver.conf
- SOAP.IP = "0.0.0.0" for external connectivity
- GM account with sufficient privileges (gmlevel 3)
- Provide SOAP credentials explicitly via `--soap-user` and `--soap-pass` (these are required; no env fallback)
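A quick way to confirm the SOAP prerequisites above is a one-off `curl` request against the exposed port — a trimmed-down version of the request the script builds internally (host, port, and GM credentials below are placeholders):

```bash
# Minimal SOAP connectivity check; asks the worldserver remote console for "server info"
curl -s -X POST \
  -H "Content-Type: text/xml" \
  --user "admin:adminpass" \
  -d '<?xml version="1.0" encoding="UTF-8"?>
<SOAP-ENV:Envelope xmlns:SOAP-ENV="http://schemas.xmlsoap.org/soap/envelope/" xmlns:ns1="urn:AC">
  <SOAP-ENV:Body>
    <ns1:executeCommand><command>server info</command></ns1:executeCommand>
  </SOAP-ENV:Body>
</SOAP-ENV:Envelope>' \
  "http://localhost:7778/"
```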
**Output Structure:**
```
./2fa-setup-TIMESTAMP/
├── qr-codes/ # QR code images for each account
├── setup-report.txt # Complete setup summary
├── console-commands.txt # Manual verification commands
└── secrets-backup.csv # Secure backup of all secrets
```
**Security Notes:**
- Generated QR codes and backup files contain sensitive TOTP secrets
- Distribute QR codes securely to users
- Delete or encrypt backup files after distribution
- TOTP secrets are also stored in AzerothCore database
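One possible cleanup step after distributing the QR codes, assuming `gpg` and coreutils' `shred` are available on the host (file path as produced above):

```bash
# Encrypt the plaintext secrets backup, then securely remove the original
gpg -c 2fa-setup-TIMESTAMP/secrets-backup.csv && shred -u 2fa-setup-TIMESTAMP/secrets-backup.csv
```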
#### `scripts/bash/generate-2fa-qr.sh` / `generate-2fa-qr.py` - Individual 2FA Setup
Generate QR codes for individual account 2FA setup.
> Tip: each script supports `-h/--help` to see all options.
```bash
# Generate QR code for single account
./scripts/bash/generate-2fa-qr.sh -u username
# Use custom issuer and output path
./scripts/bash/generate-2fa-qr.sh -u username -i "MyServer" -o /tmp/qr.png
# Use existing secret
./scripts/bash/generate-2fa-qr.sh -u username -s JBSWY3DPEHPK3PXP
# Show help / options
./scripts/bash/generate-2fa-qr.sh -h
```
> AzerothCore's SOAP endpoint only accepts 16-character Base32 secrets (A-Z and 2-7). The generators enforce this length to avoid "The provided two-factor authentication secret is not valid" errors.
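If you need to mint a compatible secret by hand, the generators' approach boils down to a one-liner (assumes `openssl` and coreutils' `base32` are installed):

```bash
# Emit a 16-character Base32 secret (A-Z, 2-7) that the SOAP endpoint will accept
openssl rand 10 | base32 -w0 | head -c16; echo
```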
#### `scripts/bash/test-2fa-token.py` - Generate TOTP Test Codes
Quickly verify a 16-character Base32 secret produces valid 6-digit codes.
```bash
# Show help
./scripts/bash/test-2fa-token.py --help
# Generate two consecutive codes for a secret
./scripts/bash/test-2fa-token.py -s JBSWY3DPEHPK3PXP -c 2
```
### Module Management Scripts
#### `scripts/bash/stage-modules.sh` - Module Staging

View File

@@ -7,8 +7,7 @@ This directory allows you to easily import custom database files and configurati
```
import/
├── db/ # Database SQL files to import
├── conf/ # Configuration file overrides
└── pdumps/ # Character dump files to import
└── conf/ # Configuration file overrides
```
## 🗄️ Database Import (`import/db/`)
@@ -94,31 +93,6 @@ AiPlayerbot.MaxRandomBots = 200
See `config/CONFIG_MANAGEMENT.md` for detailed preset documentation.
## 🎮 Character Import (`import/pdumps/`)
Import character dump files from other AzerothCore servers.
### Supported Formats
- **`.pdump`** - Character dump files from `.pdump write` command
- **`.sql`** - SQL character dump files
### Quick Start
1. Place character dump files in `import/pdumps/`
2. Run the import script:
```bash
./scripts/bash/import-pdumps.sh --password your_mysql_password --account target_account
```
### Advanced Configuration
Create `import/pdumps/configs/filename.conf` for per-character settings:
```ini
account=target_account
name=NewCharacterName # Optional: rename
guid=5000 # Optional: force GUID
```
**📖 For complete character import documentation, see [import/pdumps/README.md](pdumps/README.md)**
## 🔄 Automated Import
Both database and configuration imports are automatically handled during:
@@ -144,7 +118,6 @@ Both database and configuration imports are automatically handled during:
## 📚 Related Documentation
- [Character Import Guide](pdumps/README.md) - Complete pdump import documentation
- [Database Management](../docs/DATABASE_MANAGEMENT.md)
- [Configuration Management](../config/CONFIG_MANAGEMENT.md)
- [Module Management](../docs/ADVANCED.md#module-management)

View File

@@ -1,192 +0,0 @@
# Character PDump Import
This directory allows you to easily import character pdump files into your AzerothCore server.
## 📁 Directory Structure
```
import/pdumps/
├── README.md # This file
├── *.pdump # Place your character dump files here
├── *.sql # SQL dump files also supported
├── configs/ # Optional per-file configuration
│ ├── character1.conf
│ └── character2.conf
├── examples/ # Example files and configurations
└── processed/ # Successfully imported files are moved here
```
## 🎮 Character Dump Import
### Quick Start
1. **Place your pdump files** in this directory:
```bash
cp /path/to/mycharacter.pdump import/pdumps/
```
2. **Run the import script**:
```bash
./scripts/bash/import-pdumps.sh --password your_mysql_password --account target_account
```
3. **Login and play** - your characters are now available!
### Supported File Formats
- **`.pdump`** - Character dump files from AzerothCore `.pdump write` command
- **`.sql`** - SQL character dump files
### Configuration Options
#### Environment Variables (`.env`)
```bash
# Set default account for all imports
DEFAULT_IMPORT_ACCOUNT=testuser
# Database credentials (usually already set)
MYSQL_ROOT_PASSWORD=your_mysql_password
ACORE_DB_AUTH_NAME=acore_auth
ACORE_DB_CHARACTERS_NAME=acore_characters
```
#### Per-Character Configuration (`configs/filename.conf`)
Create a `.conf` file with the same name as your pdump file to specify custom import options:
**Example: `configs/mycharacter.conf`**
```ini
# Target account (required if not set globally)
account=testuser
# Rename character during import (optional)
name=NewCharacterName
# Force specific GUID (optional, auto-assigned if not specified)
guid=5000
```
### Command Line Usage
#### Import All Files
```bash
# Use environment settings
./scripts/bash/import-pdumps.sh
# Override settings
./scripts/bash/import-pdumps.sh --password mypass --account testuser
```
#### Import Single File
```bash
# Direct import with pdump-import.sh
./scripts/bash/pdump-import.sh --file character.pdump --account testuser --password mypass
# With character rename
./scripts/bash/pdump-import.sh --file oldchar.pdump --account newuser --name "NewName" --password mypass
# Validate before import (dry run)
./scripts/bash/pdump-import.sh --file character.pdump --account testuser --password mypass --dry-run
```
## 🛠️ Advanced Features
### Account Management
- **Account Validation**: Scripts automatically verify that target accounts exist
- **Account ID or Name**: You can use either account names or numeric IDs
- **Interactive Mode**: If no account is specified, you'll be prompted to enter one
### GUID Handling
- **Auto-Assignment**: Next available GUID is automatically assigned
- **Force GUID**: Use `--guid` parameter or config file to force specific GUID
- **Conflict Detection**: Import fails safely if GUID already exists
### Character Names
- **Validation**: Character names must follow WoW naming rules (2-12 letters)
- **Uniqueness**: Import fails if character name already exists on server
- **Renaming**: Use `--name` parameter or config file to rename during import
### Safety Features
- **Automatic Backup**: Characters database is backed up before each import
- **Server Management**: World server is safely stopped/restarted during import
- **Rollback Ready**: Backups are stored in `manual-backups/` directory
- **Dry Run**: Validate imports without actually importing
## 📋 Import Workflow
1. **Validation Phase**
- Check file format and readability
- Validate target account exists
- Verify character name availability (if specified)
- Check GUID conflicts
2. **Pre-Import Phase**
- Create automatic database backup
- Stop world server for safe import
3. **Processing Phase**
- Process SQL file (update account references, GUID, name)
- Import character data into database
4. **Post-Import Phase**
- Restart world server
- Verify import success
- Move processed files to `processed/` directory
## 🚨 Important Notes
### Before You Import
- **Backup Your Database**: Always backup before importing characters
- **Account Required**: Target account must exist in your auth database
- **Unique Names**: Character names must be unique across the entire server
- **Server Downtime**: World server is briefly restarted during import
### PDump Limitations
The AzerothCore pdump system has some known limitations:
- **Guild Data**: Guild information is not included in pdump files
- **Module Data**: Some module-specific data (transmog, reagent bank) may not transfer
- **Version Compatibility**: Pdump files from different database versions may have issues
### Troubleshooting
- **"Account not found"**: Verify account exists in auth database
- **"Character name exists"**: Use `--name` to rename or choose different name
- **"GUID conflicts"**: Use `--guid` to force different GUID or let system auto-assign
- **"Database errors"**: Check that pdump file is compatible with your database version
## 📚 Examples
### Basic Import
```bash
# Place file and import
cp character.pdump import/pdumps/
./scripts/bash/import-pdumps.sh --password mypass --account testuser
```
### Batch Import with Configuration
```bash
# Set up multiple characters
cp char1.pdump import/pdumps/
cp char2.pdump import/pdumps/
# Configure individual characters
echo "account=user1" > import/pdumps/configs/char1.conf
echo "account=user2
name=RenamedChar" > import/pdumps/configs/char2.conf
# Import all
./scripts/bash/import-pdumps.sh --password mypass
```
### Single Character Import
```bash
./scripts/bash/pdump-import.sh \
--file character.pdump \
--account testuser \
--name "MyNewCharacter" \
--password mypass
```
## 🔗 Related Documentation
- [Database Management](../../docs/DATABASE_MANAGEMENT.md)
- [Backup System](../../docs/TROUBLESHOOTING.md#backup-system)
- [Getting Started Guide](../../docs/GETTING_STARTED.md)

View File

@@ -1,43 +0,0 @@
#!/bin/bash
# Example batch import script
# This shows how to import multiple characters with different configurations
set -euo pipefail
MYSQL_PASSWORD="your_mysql_password_here"
echo "Setting up character import batch..."
# Create character-specific configurations
mkdir -p ../configs
# Character 1: Import to specific account
cat > ../configs/warrior.conf <<EOF
account=player1
EOF
# Character 2: Import with rename
cat > ../configs/mage.conf <<EOF
account=player2
name=NewMageName
EOF
# Character 3: Import with forced GUID
cat > ../configs/priest.conf <<EOF
account=player3
name=HolyPriest
guid=5000
EOF
echo "Configuration files created!"
echo ""
echo "Now place your pdump files:"
echo " warrior.pdump -> ../warrior.pdump"
echo " mage.pdump -> ../mage.pdump"
echo " priest.pdump -> ../priest.pdump"
echo ""
echo "Then run the import:"
echo " ../../../scripts/bash/import-pdumps.sh --password $MYSQL_PASSWORD"
echo ""
echo "Or import individually:"
echo " ../../../scripts/bash/pdump-import.sh --file ../warrior.pdump --account player1 --password $MYSQL_PASSWORD"

View File

@@ -1,20 +0,0 @@
# Example character import configuration
# Copy this file to configs/yourcharacter.conf and modify as needed
# Target account (required if DEFAULT_IMPORT_ACCOUNT is not set)
# Can be account name or account ID
account=testuser
# Rename character during import (optional)
# Must follow WoW naming rules: 2-12 letters, no numbers/special chars
name=NewCharacterName
# Force specific character GUID (optional)
# If not specified, next available GUID will be used automatically
# guid=5000
# Additional notes:
# - Account must exist in auth database before import
# - Character names must be unique across the server
# - GUID conflicts will cause import to fail
# - Use dry-run mode to test before actual import

View File

@@ -195,8 +195,10 @@ else
# Step 3: Update realmlist table
echo ""
echo "🌐 Step 3: Updating realmlist table..."
echo " 🔧 Setting realm address to: ${SERVER_ADDRESS}:${REALM_PORT}"
mysql -h "${MYSQL_HOST}" -u"${MYSQL_USER}" -p"${MYSQL_ROOT_PASSWORD}" --skip-ssl-verify "${DB_AUTH_NAME}" -e "
UPDATE realmlist SET address='${SERVER_ADDRESS}', port=${REALM_PORT} WHERE id=1;
SELECT CONCAT(' ✓ Realm configured: ', name, ' at ', address, ':', port) AS status FROM realmlist WHERE id=1;
" || echo "⚠️ Could not update realmlist table"
echo "✅ Realmlist updated"

View File

@@ -7,6 +7,17 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$SCRIPT_DIR"
# Source common libraries for standardized functionality
if ! source "$SCRIPT_DIR/lib/common.sh" 2>/dev/null; then
echo "❌ FATAL: Cannot load $SCRIPT_DIR/lib/common.sh" >&2
exit 1
fi
# Source utility libraries
source "$SCRIPT_DIR/lib/mysql-utils.sh" 2>/dev/null || warn "MySQL utilities not available"
source "$SCRIPT_DIR/lib/docker-utils.sh" 2>/dev/null || warn "Docker utilities not available"
source "$SCRIPT_DIR/lib/env-utils.sh" 2>/dev/null || warn "Environment utilities not available"
# Load environment defaults if present
if [ -f "$PROJECT_ROOT/.env" ]; then
set -a
@@ -63,7 +74,7 @@ Examples:
EOF
}
err(){ printf 'Error: %s\n' "$*" >&2; }
# Use standardized error function from lib/common.sh
die(){ err "$1"; exit 1; }
normalize_token(){
@@ -104,10 +115,14 @@ remove_from_list(){
arr=("${filtered[@]}")
}
# Use env-utils.sh function if available, fallback to local implementation
resolve_relative(){
local base="$1" path="$2"
if command -v python3 >/dev/null 2>&1; then
python3 - "$base" "$path" <<'PY'
if command -v path_resolve_absolute >/dev/null 2>&1; then
path_resolve_absolute "$2" "$1"
else
local base="$1" path="$2"
if command -v python3 >/dev/null 2>&1; then
python3 - "$base" "$path" <<'PY'
import os, sys
base, path = sys.argv[1:3]
if not path:
@@ -117,8 +132,9 @@ elif os.path.isabs(path):
else:
print(os.path.normpath(os.path.join(base, path)))
PY
else
die "python3 is required but was not found on PATH"
else
die "python3 is required but was not found on PATH"
fi
fi
}
@@ -248,7 +264,13 @@ generated_at="$(date --iso-8601=seconds)"
dump_db(){
local schema="$1" outfile="$2"
echo "Dumping ${schema} -> ${outfile}"
docker exec "$MYSQL_CONTAINER" mysqldump -uroot -p"$MYSQL_PW" "$schema" | gzip > "$outfile"
# Use mysql-utils.sh function if available, fallback to direct command
if command -v mysql_backup_database >/dev/null 2>&1; then
mysql_backup_database "$schema" "$outfile" "gzip" "$MYSQL_CONTAINER" "$MYSQL_PW"
else
docker exec "$MYSQL_CONTAINER" mysqldump -uroot -p"$MYSQL_PW" "$schema" | gzip > "$outfile"
fi
}
for db in "${ACTIVE_DBS[@]}"; do

View File

@@ -6,15 +6,19 @@ INVOCATION_DIR="$PWD"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"
COLOR_RED='\033[0;31m'
COLOR_GREEN='\033[0;32m'
COLOR_YELLOW='\033[1;33m'
COLOR_RESET='\033[0m'
# Source common libraries for standardized functionality
if ! source "$SCRIPT_DIR/lib/common.sh" 2>/dev/null; then
echo "❌ FATAL: Cannot load $SCRIPT_DIR/lib/common.sh" >&2
exit 1
fi
log(){ printf '%b\n' "${COLOR_GREEN}$*${COLOR_RESET}"; }
warn(){ printf '%b\n' "${COLOR_YELLOW}$*${COLOR_RESET}"; }
err(){ printf '%b\n' "${COLOR_RED}$*${COLOR_RESET}"; }
fatal(){ err "$*"; exit 1; }
# Source utility libraries
source "$SCRIPT_DIR/lib/mysql-utils.sh" 2>/dev/null || warn "MySQL utilities not available"
source "$SCRIPT_DIR/lib/docker-utils.sh" 2>/dev/null || warn "Docker utilities not available"
source "$SCRIPT_DIR/lib/env-utils.sh" 2>/dev/null || warn "Environment utilities not available"
# Use log() for main output to maintain existing behavior
log() { ok "$*"; }
SUPPORTED_DBS=(auth characters world)
declare -A SUPPORTED_SET=()
@@ -102,10 +106,14 @@ remove_from_list(){
arr=("${filtered[@]}")
}
# Use env-utils.sh function if available, fallback to local implementation
resolve_relative(){
local base="$1" path="$2"
if command -v python3 >/dev/null 2>&1; then
python3 - "$base" "$path" <<'PY'
if command -v path_resolve_absolute >/dev/null 2>&1; then
path_resolve_absolute "$2" "$1"
else
local base="$1" path="$2"
if command -v python3 >/dev/null 2>&1; then
python3 - "$base" "$path" <<'PY'
import os, sys
base, path = sys.argv[1:3]
if not path:
@@ -115,8 +123,9 @@ elif os.path.isabs(path):
else:
print(os.path.normpath(os.path.join(base, path)))
PY
else
fatal "python3 is required but was not found on PATH"
else
fatal "python3 is required but was not found on PATH"
fi
fi
}
@@ -280,7 +289,13 @@ backup_db(){
local out="manual-backups/${label}-pre-import-$(timestamp).sql"
mkdir -p manual-backups
log "Backing up current ${schema} to ${out}"
docker exec ac-mysql mysqldump -uroot -p"$MYSQL_PW" "$schema" > "$out"
# Use mysql-utils.sh function if available, fallback to direct command
if command -v mysql_backup_database >/dev/null 2>&1; then
mysql_backup_database "$schema" "$out" "none" "ac-mysql" "$MYSQL_PW"
else
docker exec ac-mysql mysqldump -uroot -p"$MYSQL_PW" "$schema" > "$out"
fi
}
restore(){
@@ -302,7 +317,22 @@ db_selected(){
}
count_rows(){
docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -N -B -e "$1"
# Use mysql-utils.sh function if available, fallback to direct command
if command -v docker_mysql_query >/dev/null 2>&1; then
# Extract database name from query for mysql-utils function
local query="$1"
local db_name
# Simple extraction - assumes "FROM database.table" or "database.table" pattern
if [[ "$query" =~ FROM[[:space:]]+([^.[:space:]]+)\. ]]; then
db_name="${BASH_REMATCH[1]}"
docker_mysql_query "$db_name" "$query" "ac-mysql" "$MYSQL_PW"
else
# Fallback to original method if can't parse database
docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -N -B -e "$query"
fi
else
docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -N -B -e "$1"
fi
}
case "${1:-}" in

View File

@@ -6,18 +6,14 @@ set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"
COLOR_RED='\033[0;31m'
COLOR_GREEN='\033[0;32m'
COLOR_YELLOW='\033[1;33m'
COLOR_BLUE='\033[0;34m'
COLOR_CYAN='\033[0;36m'
COLOR_RESET='\033[0m'
# Source common library for standardized logging
if ! source "$SCRIPT_DIR/lib/common.sh" 2>/dev/null; then
echo "❌ FATAL: Cannot load $SCRIPT_DIR/lib/common.sh" >&2
exit 1
fi
log(){ printf '%b\n' "${COLOR_GREEN}$*${COLOR_RESET}"; }
info(){ printf '%b\n' "${COLOR_CYAN}$*${COLOR_RESET}"; }
warn(){ printf '%b\n' "${COLOR_YELLOW}$*${COLOR_RESET}"; }
err(){ printf '%b\n' "${COLOR_RED}$*${COLOR_RESET}"; }
fatal(){ err "$*"; exit 1; }
# Use log() instead of info() for main output to maintain existing behavior
log() { ok "$*"; }
MYSQL_PW=""
BACKUP_DIR=""

View File

@@ -1,584 +0,0 @@
#!/bin/bash
#
# AzerothCore Bulk 2FA Setup Script
# Generates and configures TOTP 2FA for multiple accounts
#
# Usage: ./scripts/bash/bulk-2fa-setup.sh [OPTIONS]
#
set -e
# Script directory for relative imports
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# Source common utilities
source "$SCRIPT_DIR/lib/common.sh"
# Set environment paths
ENV_PATH="${ENV_PATH:-$PROJECT_ROOT/.env}"
DEFAULT_ENV_PATH="$PROJECT_ROOT/.env"
# =============================================================================
# GLOBAL VARIABLES
# =============================================================================
# Command line options
OPT_ALL=false
OPT_ACCOUNTS=()
OPT_FORCE=false
OPT_OUTPUT_DIR=""
OPT_DRY_RUN=false
OPT_ISSUER="AzerothCore"
OPT_FORMAT="qr"
# Container and database settings
WORLDSERVER_CONTAINER="ac-worldserver"
DATABASE_CONTAINER="ac-mysql"
MYSQL_PASSWORD=""
# SOAP settings for official AzerothCore API
SOAP_HOST="localhost"
SOAP_PORT="7778"
SOAP_USERNAME=""
SOAP_PASSWORD=""
# Output paths
OUTPUT_BASE_DIR=""
QR_CODES_DIR=""
SETUP_REPORT=""
CONSOLE_COMMANDS=""
SECRETS_BACKUP=""
# =============================================================================
# USAGE AND HELP
# =============================================================================
show_usage() {
echo "Usage: $0 [OPTIONS]"
echo ""
echo "Bulk 2FA setup for AzerothCore accounts using official SOAP API"
echo ""
echo "Options:"
echo " --all Process all non-bot accounts without 2FA"
echo " --account USERNAME Process specific account (can be repeated)"
echo " --force Regenerate 2FA even if already exists"
echo " --output-dir PATH Custom output directory"
echo " --dry-run Show what would be done without executing"
echo " --issuer NAME Issuer name for TOTP (default: AzerothCore)"
echo " --format [qr|manual] Output QR codes or manual setup info"
echo " --soap-user USERNAME SOAP API username (required)"
echo " --soap-pass PASSWORD SOAP API password (required)"
echo " -h, --help Show this help message"
echo ""
echo "Examples:"
echo " $0 --all # Setup 2FA for all accounts"
echo " $0 --account user1 --account user2 # Setup for specific accounts"
echo " $0 --all --force --issuer MyServer # Force regenerate with custom issuer"
echo " $0 --all --dry-run # Preview what would be done"
echo ""
echo "Requirements:"
echo " - AzerothCore worldserver with SOAP enabled on port 7778"
echo " - GM account with sufficient privileges for SOAP access"
echo " - Remote Access (Ra.Enable = 1) enabled in worldserver.conf"
}
# =============================================================================
# UTILITY FUNCTIONS
# =============================================================================
# Check if required containers are running and healthy
check_containers() {
info "Checking container status..."
# Check worldserver container
if ! docker ps --format '{{.Names}}' | grep -q "^${WORLDSERVER_CONTAINER}$"; then
fatal "Container $WORLDSERVER_CONTAINER is not running"
fi
# Check if database container exists
if ! docker ps --format '{{.Names}}' | grep -q "^${DATABASE_CONTAINER}$"; then
fatal "Container $DATABASE_CONTAINER is not running"
fi
# Test database connectivity
if ! docker exec "$WORLDSERVER_CONTAINER" mysql -h "$DATABASE_CONTAINER" -u root -p"$MYSQL_PASSWORD" acore_auth -e "SELECT 1;" &>/dev/null; then
fatal "Cannot connect to AzerothCore database"
fi
# Test SOAP connectivity (only if credentials are available)
if [ -n "$SOAP_USERNAME" ] && [ -n "$SOAP_PASSWORD" ]; then
info "Testing SOAP API connectivity..."
if ! soap_result=$(soap_execute_command "server info"); then
fatal "Cannot connect to SOAP API: $soap_result"
fi
ok "SOAP API is accessible"
fi
ok "Containers are healthy and accessible"
}
# Execute MySQL query via container
mysql_query() {
local query="$1"
local database="${2:-acore_auth}"
docker exec "$WORLDSERVER_CONTAINER" mysql \
-h "$DATABASE_CONTAINER" \
-u root \
-p"$MYSQL_PASSWORD" \
"$database" \
-e "$query" \
2>/dev/null
}
# Execute SOAP command via AzerothCore official API
soap_execute_command() {
local command="$1"
local response
# Construct SOAP XML request
local soap_request='<?xml version="1.0" encoding="UTF-8"?>
<SOAP-ENV:Envelope
xmlns:SOAP-ENV="http://schemas.xmlsoap.org/soap/envelope/"
xmlns:SOAP-ENC="http://schemas.xmlsoap.org/soap/encoding/"
xmlns:xsi="http://www.w3.org/1999/XMLSchema-instance"
xmlns:xsd="http://www.w3.org/1999/XMLSchema"
xmlns:ns1="urn:AC">
<SOAP-ENV:Body>
<ns1:executeCommand>
<command>'"$command"'</command>
</ns1:executeCommand>
</SOAP-ENV:Body>
</SOAP-ENV:Envelope>'
# Execute SOAP request
response=$(curl -s -X POST \
-H "Content-Type: text/xml" \
--user "$SOAP_USERNAME:$SOAP_PASSWORD" \
-d "$soap_request" \
"http://$SOAP_HOST:$SOAP_PORT/" 2>/dev/null)
# Flatten response for reliable parsing
local flat_response
flat_response=$(echo "$response" | tr -d '\n' | sed 's/\r//g')
# Check if response contains fault
if echo "$flat_response" | grep -q "SOAP-ENV:Fault"; then
# Extract fault string for error reporting
echo "$flat_response" | sed -n 's/.*<faultstring>\(.*\)<\/faultstring>.*/\1/p' | sed 's/&#xD;//g'
return 1
fi
# Extract successful result
echo "$flat_response" | sed -n 's/.*<result>\(.*\)<\/result>.*/\1/p' | sed 's/&#xD;//g'
return 0
}
# Generate Base32 TOTP secret
generate_totp_secret() {
# Use existing generation logic from generate-2fa-qr.sh
if command -v base32 >/dev/null 2>&1; then
openssl rand 10 | base32 -w0 | head -c16
else
# Fallback using Python
python3 -c "
import base64
import os
secret_bytes = os.urandom(10)
secret_b32 = base64.b32encode(secret_bytes).decode('ascii').rstrip('=')
print(secret_b32[:16])
"
fi
}
# Validate Base32 secret format
validate_base32_secret() {
local secret="$1"
if [[ ! "$secret" =~ ^[A-Z2-7]+$ ]]; then
return 1
fi
if [ ${#secret} -ne 16 ]; then
err "AzerothCore SOAP requires a 16-character Base32 secret (got ${#secret})"
return 1
fi
return 0
}
# =============================================================================
# ACCOUNT DISCOVERY FUNCTIONS
# =============================================================================
# Get all accounts that need 2FA setup
get_accounts_needing_2fa() {
local force="$1"
local query
if [ "$force" = "true" ]; then
# Include accounts that already have 2FA when force is enabled
query="SELECT username FROM account
WHERE username NOT LIKE 'rndbot%'
AND username NOT LIKE 'playerbot%'
ORDER BY username;"
else
# Only accounts without 2FA
query="SELECT username FROM account
WHERE (totp_secret IS NULL OR totp_secret = '')
AND username NOT LIKE 'rndbot%'
AND username NOT LIKE 'playerbot%'
ORDER BY username;"
fi
mysql_query "$query" | tail -n +2 # Remove header row
}
# Check if specific account exists
account_exists() {
local username="$1"
local result
result=$(mysql_query "SELECT COUNT(*) FROM account WHERE username = '$username';" | tail -n +2)
[ "$result" -eq 1 ]
}
# Check if account already has 2FA
account_has_2fa() {
local username="$1"
local result
result=$(mysql_query "SELECT COUNT(*) FROM account WHERE username = '$username' AND totp_secret IS NOT NULL AND totp_secret != '';" | tail -n +2)
[ "$result" -eq 1 ]
}
# =============================================================================
# 2FA SETUP FUNCTIONS
# =============================================================================
# Generate and set up 2FA for a single account
setup_2fa_for_account() {
local username="$1"
local force="$2"
local secret=""
local qr_output=""
info "Processing account: $username"
# Check if account exists
if ! account_exists "$username"; then
err "Account '$username' does not exist, skipping"
return 1
fi
# Check if account already has 2FA
if account_has_2fa "$username" && [ "$force" != "true" ]; then
warn "Account '$username' already has 2FA configured, use --force to regenerate"
return 0
fi
# Generate TOTP secret
secret=$(generate_totp_secret)
if [ -z "$secret" ] || ! validate_base32_secret "$secret"; then
err "Failed to generate valid TOTP secret for $username"
return 1
fi
if [ "$OPT_DRY_RUN" = "true" ]; then
log "DRY RUN: Would set 2FA secret for $username: $secret"
return 0
fi
# Set 2FA using official AzerothCore SOAP API
local soap_result
if ! soap_result=$(soap_execute_command ".account set 2fa $username $secret"); then
err "Failed to set 2FA for $username via SOAP API: $soap_result"
return 1
fi
# Verify success message
if ! echo "$soap_result" | grep -q "Successfully enabled two-factor authentication"; then
err "Unexpected SOAP response for $username: $soap_result"
return 1
fi
# Generate QR code if format is 'qr'
if [ "$OPT_FORMAT" = "qr" ]; then
qr_output="$QR_CODES_DIR/${username}_2fa_qr.png"
if ! "$SCRIPT_DIR/generate-2fa-qr.sh" -u "$username" -s "$secret" -i "$OPT_ISSUER" -o "$qr_output" >/dev/null; then
warn "Failed to generate QR code for $username, but secret was saved"
fi
fi
# Log setup information
echo "$username,$secret,$(date -u +"%Y-%m-%d %H:%M:%S UTC")" >> "$SECRETS_BACKUP"
echo "account set 2fa $username $secret" >> "$CONSOLE_COMMANDS"
ok "2FA configured for account: $username"
return 0
}
# =============================================================================
# OUTPUT AND REPORTING FUNCTIONS
# =============================================================================
# Create output directory structure
create_output_structure() {
local timestamp
timestamp=$(date +"%Y%m%d%H%M%S")
if [ -n "$OPT_OUTPUT_DIR" ]; then
OUTPUT_BASE_DIR="$OPT_OUTPUT_DIR"
else
OUTPUT_BASE_DIR="$PROJECT_ROOT/2fa-setup-$timestamp"
fi
# Create directories
mkdir -p "$OUTPUT_BASE_DIR"
QR_CODES_DIR="$OUTPUT_BASE_DIR/qr-codes"
mkdir -p "$QR_CODES_DIR"
# Set up output files
SETUP_REPORT="$OUTPUT_BASE_DIR/setup-report.txt"
CONSOLE_COMMANDS="$OUTPUT_BASE_DIR/console-commands.txt"
SECRETS_BACKUP="$OUTPUT_BASE_DIR/secrets-backup.csv"
# Initialize files
echo "# AzerothCore 2FA Console Commands" > "$CONSOLE_COMMANDS"
echo "# Generated on $(date)" >> "$CONSOLE_COMMANDS"
echo "" >> "$CONSOLE_COMMANDS"
echo "username,secret,generated_date" > "$SECRETS_BACKUP"
info "Output directory: $OUTPUT_BASE_DIR"
}
# Generate final setup report
generate_setup_report() {
local total_processed="$1"
local successful="$2"
local failed="$3"
{
echo "AzerothCore Bulk 2FA Setup Report"
echo "================================="
echo ""
echo "Generated: $(date)"
echo "Command: $0 $*"
echo ""
echo "Summary:"
echo "--------"
echo "Total accounts processed: $total_processed"
echo "Successfully configured: $successful"
echo "Failed: $failed"
echo ""
echo "Output Files:"
echo "-------------"
echo "- QR Codes: $QR_CODES_DIR/"
echo "- Console Commands: $CONSOLE_COMMANDS"
echo "- Secrets Backup: $SECRETS_BACKUP"
echo ""
echo "Next Steps:"
echo "-----------"
echo "1. Distribute QR codes to users securely"
echo "2. Users scan QR codes with authenticator apps"
echo "3. Verify setup using console commands if needed"
echo "4. Store secrets backup securely and delete when no longer needed"
echo ""
echo "Security Notes:"
echo "--------------"
echo "- QR codes contain sensitive TOTP secrets"
echo "- Secrets backup file contains plaintext secrets"
echo "- Delete or encrypt these files after distribution"
echo "- Secrets are also stored in AzerothCore database"
} > "$SETUP_REPORT"
info "Setup report generated: $SETUP_REPORT"
}
# =============================================================================
# MAIN SCRIPT LOGIC
# =============================================================================
# Parse command line arguments
parse_arguments() {
while [[ $# -gt 0 ]]; do
case $1 in
--all)
OPT_ALL=true
shift
;;
--account)
if [ -z "$2" ]; then
fatal "Option --account requires a username argument"
fi
OPT_ACCOUNTS+=("$2")
shift 2
;;
--force)
OPT_FORCE=true
shift
;;
--output-dir)
if [ -z "$2" ]; then
fatal "Option --output-dir requires a path argument"
fi
OPT_OUTPUT_DIR="$2"
shift 2
;;
--dry-run)
OPT_DRY_RUN=true
shift
;;
--issuer)
if [ -z "$2" ]; then
fatal "Option --issuer requires a name argument"
fi
OPT_ISSUER="$2"
shift 2
;;
--format)
if [ -z "$2" ]; then
fatal "Option --format requires qr or manual"
fi
if [[ "$2" != "qr" && "$2" != "manual" ]]; then
fatal "Format must be 'qr' or 'manual'"
fi
OPT_FORMAT="$2"
shift 2
;;
--soap-user)
if [ -z "$2" ]; then
fatal "Option --soap-user requires a username argument"
fi
SOAP_USERNAME="$2"
shift 2
;;
--soap-pass)
if [ -z "$2" ]; then
fatal "Option --soap-pass requires a password argument"
fi
SOAP_PASSWORD="$2"
shift 2
;;
-h|--help)
show_usage
exit 0
;;
*)
fatal "Unknown option: $1"
;;
esac
done
}
# Main execution function
main() {
local accounts_to_process=()
local total_processed=0
local successful=0
local failed=0
# Show help if no arguments were provided
if [ $# -eq 0 ]; then
show_usage
exit 1
fi
# Parse arguments
parse_arguments "$@"
# Validate options
if [ "$OPT_ALL" = "false" ] && [ ${#OPT_ACCOUNTS[@]} -eq 0 ]; then
fatal "Must specify either --all or --account USERNAME"
fi
if [ "$OPT_ALL" = "true" ] && [ ${#OPT_ACCOUNTS[@]} -gt 0 ]; then
fatal "Cannot use --all with specific --account options"
fi
# Load environment variables
MYSQL_PASSWORD=$(read_env "MYSQL_ROOT_PASSWORD" "")
if [ -z "$MYSQL_PASSWORD" ]; then
fatal "MYSQL_ROOT_PASSWORD not found in environment"
fi
# Require SOAP credentials via CLI flags
if [ -z "$SOAP_USERNAME" ] || [ -z "$SOAP_PASSWORD" ]; then
fatal "SOAP credentials required. Provide --soap-user and --soap-pass."
fi
# Check container health
check_containers
# Create output structure
create_output_structure
# Determine accounts to process
if [ "$OPT_ALL" = "true" ]; then
info "Discovering accounts that need 2FA setup..."
readarray -t accounts_to_process < <(get_accounts_needing_2fa "$OPT_FORCE")
if [ ${#accounts_to_process[@]} -eq 0 ]; then
if [ "$OPT_FORCE" = "true" ]; then
warn "No accounts found in database"
else
ok "All accounts already have 2FA configured"
fi
exit 0
fi
info "Found ${#accounts_to_process[@]} accounts to process"
else
accounts_to_process=("${OPT_ACCOUNTS[@]}")
fi
# Display dry run information
if [ "$OPT_DRY_RUN" = "true" ]; then
warn "DRY RUN MODE - No changes will be made"
info "Would process the following accounts:"
for account in "${accounts_to_process[@]}"; do
echo " - $account"
done
echo ""
fi
# Process each account
info "Processing ${#accounts_to_process[@]} accounts..."
for account in "${accounts_to_process[@]}"; do
total_processed=$((total_processed + 1))
if setup_2fa_for_account "$account" "$OPT_FORCE"; then
successful=$((successful + 1))
else
failed=$((failed + 1))
fi
done
# Generate final report
if [ "$OPT_DRY_RUN" = "false" ]; then
generate_setup_report "$total_processed" "$successful" "$failed"
# Summary
echo ""
ok "Bulk 2FA setup completed"
info "Processed: $total_processed accounts"
info "Successful: $successful"
info "Failed: $failed"
info "Output directory: $OUTPUT_BASE_DIR"
if [ "$failed" -gt 0 ]; then
warn "Some accounts failed to process. Check the output for details."
exit 1
fi
else
info "Dry run completed. Use without --dry-run to execute."
if [ "$failed" -gt 0 ]; then
warn "Some accounts would fail to process."
exit 1
fi
fi
}
# Execute main function with all arguments
main "$@"

View File

@@ -4,9 +4,31 @@
# automatically rerun db-import-conditional to hydrate from backups.
set -euo pipefail
log(){ echo "🛡️ [db-guard] $*"; }
warn(){ echo "⚠️ [db-guard] $*" >&2; }
err(){ echo "❌ [db-guard] $*" >&2; }
# Source common library if available (container environment)
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
if [ -f "$SCRIPT_DIR/../scripts/bash/lib/common.sh" ]; then
# Running from project root
source "$SCRIPT_DIR/../scripts/bash/lib/common.sh"
db_guard_log() { info "🛡️ [db-guard] $*"; }
db_guard_warn() { warn "[db-guard] $*"; }
db_guard_err() { err "[db-guard] $*"; }
elif [ -f "$SCRIPT_DIR/lib/common.sh" ]; then
# Running from scripts/bash directory
source "$SCRIPT_DIR/lib/common.sh"
db_guard_log() { info "🛡️ [db-guard] $*"; }
db_guard_warn() { warn "[db-guard] $*"; }
db_guard_err() { err "[db-guard] $*"; }
else
# Fallback for container environment where lib/common.sh may not be available
db_guard_log(){ echo "🛡️ [db-guard] $*"; }
db_guard_warn(){ echo "⚠️ [db-guard] $*" >&2; }
db_guard_err(){ echo "❌ [db-guard] $*" >&2; }
fi
# Maintain compatibility with existing function names
log() { db_guard_log "$*"; }
warn() { db_guard_warn "$*"; }
err() { db_guard_err "$*"; }
MYSQL_HOST="${CONTAINER_MYSQL:-ac-mysql}"
MYSQL_PORT="${MYSQL_PORT:-3306}"
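The wrapper functions above keep existing log/warn/err call sites working whether or not common.sh could be sourced. A small sketch of the resulting behaviour (messages are illustrative):
log "waiting for MySQL at ${MYSQL_HOST}:${MYSQL_PORT}"   # routed through info via common.sh, or plain echo in the fallback
warn "auth database looks empty"                          # goes to stderr in both cases
err "restore failed; see db-import logs"                  # goes to stderr in both cases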

View File

@@ -219,12 +219,10 @@ if [ -z "$backup_path" ]; then
echo "📦 Latest daily backup found: $latest_daily"
for backup_file in "$BACKUP_DIRS/daily/$latest_daily"/*.sql.gz; do
if [ -f "$backup_file" ] && [ -s "$backup_file" ]; then
if timeout 10 gzip -t "$backup_file" >/dev/null 2>&1; then
if timeout 10 zcat "$backup_file" 2>/dev/null | head -20 | grep -q "CREATE DATABASE\|INSERT INTO\|CREATE TABLE"; then
echo "✅ Valid daily backup file: $(basename "$backup_file")"
backup_path="$BACKUP_DIRS/daily/$latest_daily"
break 2
else
echo "⚠️ gzip validation failed for $(basename "$backup_file")"
fi
fi
done
@@ -239,12 +237,10 @@ if [ -z "$backup_path" ]; then
echo "📦 Latest hourly backup found: $latest_hourly"
for backup_file in "$BACKUP_DIRS/hourly/$latest_hourly"/*.sql.gz; do
if [ -f "$backup_file" ] && [ -s "$backup_file" ]; then
if timeout 10 gzip -t "$backup_file" >/dev/null 2>&1; then
if timeout 10 zcat "$backup_file" >/dev/null 2>&1; then
echo "✅ Valid hourly backup file: $(basename "$backup_file")"
backup_path="$BACKUP_DIRS/hourly/$latest_hourly"
break 2
else
echo "⚠️ gzip validation failed for $(basename "$backup_file")"
fi
fi
done
@@ -263,12 +259,10 @@ if [ -z "$backup_path" ]; then
echo "🔍 Validating timestamped backup content..."
for backup_file in "$BACKUP_DIRS/$latest_timestamped"/*.sql.gz; do
if [ -f "$backup_file" ] && [ -s "$backup_file" ]; then
if timeout 10 gzip -t "$backup_file" >/dev/null 2>&1; then
if timeout 10 zcat "$backup_file" >/dev/null 2>&1; then
echo "✅ Valid timestamped backup found: $(basename "$backup_file")"
backup_path="$BACKUP_DIRS/$latest_timestamped"
break 2
else
echo "⚠️ gzip validation failed for $(basename "$backup_file")"
fi
fi
done
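The daily-backup hunk above replaces the pure gzip integrity test with a content probe, while the hourly and timestamped hunks keep a simple decompress check. A standalone sketch of the stricter probe (the path is illustrative):
backup_file=/backups/daily/2025-12-02/acore_auth.sql.gz
if timeout 10 zcat "$backup_file" 2>/dev/null | head -20 | grep -q "CREATE DATABASE\|INSERT INTO\|CREATE TABLE"; then
  echo "looks like a real SQL dump"
else
  echo "compressed file exists but does not look like a SQL dump" >&2
fi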

View File

@@ -6,6 +6,13 @@ set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}" )" && pwd)"
ROOT_DIR="$(cd "$SCRIPT_DIR/../.." && pwd)"
# Source common library for standardized logging
if ! source "$ROOT_DIR/scripts/bash/lib/common.sh" 2>/dev/null; then
echo "❌ FATAL: Cannot load $ROOT_DIR/scripts/bash/lib/common.sh" >&2
exit 1
fi
DEFAULT_COMPOSE_FILE="$ROOT_DIR/docker-compose.yml"
ENV_FILE="$ROOT_DIR/.env"
TEMPLATE_FILE="$ROOT_DIR/.env.template"
@@ -16,17 +23,6 @@ DEFAULT_PROJECT_NAME="$(project_name::resolve "$ENV_FILE" "$TEMPLATE_FILE")"
source "$ROOT_DIR/scripts/bash/compose_overrides.sh"
declare -a COMPOSE_FILE_ARGS=()
BLUE='\033[0;34m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m'
info(){ echo -e "${BLUE} $*${NC}"; }
ok(){ echo -e "${GREEN}$*${NC}"; }
warn(){ echo -e "${YELLOW}⚠️ $*${NC}"; }
err(){ echo -e "${RED}$*${NC}"; }
read_env(){
local key="$1" default="${2:-}" value=""
if [ -f "$ENV_FILE" ]; then

View File

@@ -1,116 +0,0 @@
#!/usr/bin/env python3
"""
AzerothCore 2FA QR Code Generator (Python version)
Generates TOTP secrets and QR codes for AzerothCore accounts
"""
import argparse
import base64
import os
import sys
import re
def validate_base32(secret):
"""Validate Base32 secret format"""
if not re.match(r'^[A-Z2-7]+$', secret):
print("Error: Invalid Base32 secret. Only A-Z and 2-7 characters allowed.", file=sys.stderr)
return False
if len(secret) != 16:
print(f"Error: AzerothCore SOAP requires a 16-character Base32 secret (got {len(secret)}).", file=sys.stderr)
return False
return True
def generate_secret():
"""Generate a random 16-character Base32 secret (AzerothCore SOAP requirement)"""
secret_bytes = os.urandom(10)
secret_b32 = base64.b32encode(secret_bytes).decode('ascii').rstrip('=')
return secret_b32[:16]
def generate_qr_code(uri, output_path):
"""Generate QR code using available library"""
try:
import qrcode
qr = qrcode.QRCode(
version=1,
error_correction=qrcode.constants.ERROR_CORRECT_L,
box_size=6,
border=4,
)
qr.add_data(uri)
qr.make(fit=True)
img = qr.make_image(fill_color="black", back_color="white")
img.save(output_path)
return True
except ImportError:
print("Error: qrcode library not installed.", file=sys.stderr)
print("Install it with: pip3 install qrcode[pil]", file=sys.stderr)
return False
def main():
parser = argparse.ArgumentParser(
description="Generate TOTP secrets and QR codes for AzerothCore 2FA",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog="""
Examples:
%(prog)s -u john_doe
%(prog)s -u john_doe -o /tmp/qr.png
%(prog)s -u john_doe -s JBSWY3DPEHPK3PXP -i MyServer
"""
)
parser.add_argument('-u', '--username', required=True,
help='Target username for 2FA setup')
parser.add_argument('-o', '--output',
help='Path to save QR code image (default: ./USERNAME_2fa_qr.png)')
parser.add_argument('-s', '--secret',
help='Use existing 16-character Base32 secret (generates random if not provided)')
parser.add_argument('-i', '--issuer', default='AzerothCore',
help='Issuer name for the TOTP entry (default: AzerothCore)')
args = parser.parse_args()
# Set default output path
if not args.output:
args.output = f"./{args.username}_2fa_qr.png"
# Generate or validate secret
if args.secret:
print("Using provided secret...")
if not validate_base32(args.secret):
sys.exit(1)
secret = args.secret
else:
print("Generating new TOTP secret...")
secret = generate_secret()
print(f"Generated secret: {secret}")
# Create TOTP URI
uri = f"otpauth://totp/{args.issuer}:{args.username}?secret={secret}&issuer={args.issuer}"
# Generate QR code
print("Generating QR code...")
if generate_qr_code(uri, args.output):
print(f"✓ QR code generated successfully: {args.output}")
else:
print("\nManual setup information:")
print(f"Secret: {secret}")
print(f"URI: {uri}")
sys.exit(1)
# Display setup information
print("\n=== AzerothCore 2FA Setup Information ===")
print(f"Username: {args.username}")
print(f"Secret: {secret}")
print(f"QR Code: {args.output}")
print(f"Issuer: {args.issuer}")
print("\nNext steps:")
print("1. Share the QR code image with the user")
print("2. User scans QR code with authenticator app")
print("3. Run on AzerothCore console:")
print(f" account set 2fa {args.username} {secret}")
print("4. User can now use 6-digit codes for login")
print("\nSecurity Note: Keep the secret secure and delete the QR code after setup.")
if __name__ == "__main__":
main()

View File

@@ -1,166 +0,0 @@
#!/bin/bash
# AzerothCore 2FA QR Code Generator
# Generates TOTP secrets and QR codes for AzerothCore accounts
set -e
# Color codes for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Function to display usage
show_usage() {
echo "Usage: $0 -u USERNAME [-o OUTPUT_PATH] [-s SECRET] [-i ISSUER]"
echo ""
echo "Options:"
echo " -u USERNAME Target username for 2FA setup (required)"
echo " -o OUTPUT_PATH Path to save QR code image (default: ./USERNAME_2fa_qr.png)"
echo " -s SECRET Use existing 16-character Base32 secret (generates random if not provided)"
echo " -i ISSUER Issuer name for the TOTP entry (default: AzerothCore)"
echo " -h Show this help message"
echo ""
echo "Examples:"
echo " $0 -u john_doe"
echo " $0 -u john_doe -o /tmp/qr.png"
echo " $0 -u john_doe -s JBSWY3DPEHPK3PXP -i MyServer"
}
# Function to validate Base32
validate_base32() {
local secret="$1"
if [[ ! "$secret" =~ ^[A-Z2-7]+$ ]]; then
echo -e "${RED}Error: Invalid Base32 secret. Only A-Z and 2-7 characters allowed.${NC}" >&2
return 1
fi
if [ ${#secret} -ne 16 ]; then
echo -e "${RED}Error: AzerothCore SOAP requires a 16-character Base32 secret (got ${#secret}).${NC}" >&2
return 1
fi
}
# Function to generate Base32 secret
generate_secret() {
# Generate 10 random bytes and encode as 16-character Base32 (AzerothCore SOAP requirement)
if command -v base32 >/dev/null 2>&1; then
openssl rand 10 | base32 -w0 | head -c16
else
# Fallback using Python if base32 command not available
python3 -c "
import base64
import os
secret_bytes = os.urandom(10)
secret_b32 = base64.b32encode(secret_bytes).decode('ascii').rstrip('=')
print(secret_b32[:16])
"
fi
}
# Default values
USERNAME=""
OUTPUT_PATH=""
SECRET=""
ISSUER="AzerothCore"
# Parse command line arguments
while getopts "u:o:s:i:h" opt; do
case ${opt} in
u )
USERNAME="$OPTARG"
;;
o )
OUTPUT_PATH="$OPTARG"
;;
s )
SECRET="$OPTARG"
;;
i )
ISSUER="$OPTARG"
;;
h )
show_usage
exit 0
;;
\? )
echo -e "${RED}Invalid option: $OPTARG${NC}" 1>&2
show_usage
exit 1
;;
: )
echo -e "${RED}Invalid option: $OPTARG requires an argument${NC}" 1>&2
show_usage
exit 1
;;
esac
done
# Validate required parameters
if [ -z "$USERNAME" ]; then
echo -e "${RED}Error: Username is required.${NC}" >&2
show_usage
exit 1
fi
# Set default output path if not provided
if [ -z "$OUTPUT_PATH" ]; then
OUTPUT_PATH="./${USERNAME}_2fa_qr.png"
fi
# Generate secret if not provided
if [ -z "$SECRET" ]; then
echo -e "${BLUE}Generating new TOTP secret...${NC}"
SECRET=$(generate_secret)
if [ -z "$SECRET" ]; then
echo -e "${RED}Error: Failed to generate secret.${NC}" >&2
exit 1
fi
echo -e "${GREEN}Generated secret: $SECRET${NC}"
else
echo -e "${BLUE}Using provided secret...${NC}"
if ! validate_base32 "$SECRET"; then
exit 1
fi
fi
# Create TOTP URI
URI="otpauth://totp/${ISSUER}:${USERNAME}?secret=${SECRET}&issuer=${ISSUER}"
# Check if qrencode is available
if ! command -v qrencode >/dev/null 2>&1; then
echo -e "${RED}Error: qrencode is not installed.${NC}" >&2
echo "Install it with: sudo apt-get install qrencode (Ubuntu/Debian) or brew install qrencode (macOS)"
echo ""
echo -e "${BLUE}Manual setup information:${NC}"
echo "Secret: $SECRET"
echo "URI: $URI"
exit 1
fi
# Generate QR code
echo -e "${BLUE}Generating QR code...${NC}"
if echo "$URI" | qrencode -s 6 -o "$OUTPUT_PATH"; then
echo -e "${GREEN}✓ QR code generated successfully: $OUTPUT_PATH${NC}"
else
echo -e "${RED}Error: Failed to generate QR code.${NC}" >&2
exit 1
fi
# Display setup information
echo ""
echo -e "${YELLOW}=== AzerothCore 2FA Setup Information ===${NC}"
echo "Username: $USERNAME"
echo "Secret: $SECRET"
echo "QR Code: $OUTPUT_PATH"
echo "Issuer: $ISSUER"
echo ""
echo -e "${BLUE}Next steps:${NC}"
echo "1. Share the QR code image with the user"
echo "2. User scans QR code with authenticator app"
echo "3. Run on AzerothCore console:"
echo -e " ${GREEN}account set 2fa $USERNAME $SECRET${NC}"
echo "4. User can now use 6-digit codes for login"
echo ""
echo -e "${YELLOW}Security Note: Keep the secret secure and delete the QR code after setup.${NC}"

View File

@@ -1,283 +0,0 @@
#!/bin/bash
# Process and import character pdump files from import/pdumps/ directory
set -euo pipefail
INVOCATION_DIR="$PWD"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR/../.." # Go to project root
COLOR_RED='\033[0;31m'
COLOR_GREEN='\033[0;32m'
COLOR_YELLOW='\033[1;33m'
COLOR_BLUE='\033[0;34m'
COLOR_RESET='\033[0m'
log(){ printf '%b\n' "${COLOR_GREEN}$*${COLOR_RESET}"; }
warn(){ printf '%b\n' "${COLOR_YELLOW}$*${COLOR_RESET}"; }
err(){ printf '%b\n' "${COLOR_RED}$*${COLOR_RESET}"; }
info(){ printf '%b\n' "${COLOR_BLUE}$*${COLOR_RESET}"; }
fatal(){ err "$*"; exit 1; }
# Source environment variables
if [ -f ".env" ]; then
set -a
source .env
set +a
fi
IMPORT_DIR="./import/pdumps"
MYSQL_PW="${MYSQL_ROOT_PASSWORD:-}"
AUTH_DB="${ACORE_DB_AUTH_NAME:-acore_auth}"
CHARACTERS_DB="${ACORE_DB_CHARACTERS_NAME:-acore_characters}"
DEFAULT_ACCOUNT="${DEFAULT_IMPORT_ACCOUNT:-}"
INTERACTIVE=${INTERACTIVE:-true}
usage(){
cat <<'EOF'
Usage: ./import-pdumps.sh [options]
Automatically process and import all character pdump files from import/pdumps/ directory.
Options:
--password PASS MySQL root password (overrides env)
--account ACCOUNT Default account for imports (overrides env)
--auth-db NAME Auth database name (overrides env)
--characters-db NAME Characters database name (overrides env)
--non-interactive Don't prompt for missing information
-h, --help Show this help and exit
Directory Structure:
import/pdumps/
├── character1.pdump # Will be imported with default settings
├── character2.sql # SQL dump files also supported
└── configs/ # Optional: per-file configuration
├── character1.conf # account=testuser, name=NewName
└── character2.conf # account=12345, guid=5000
Configuration File Format (.conf):
account=target_account_name_or_id
name=new_character_name # Optional: rename character
guid=force_specific_guid # Optional: force GUID
Environment Variables:
MYSQL_ROOT_PASSWORD # MySQL root password
DEFAULT_IMPORT_ACCOUNT # Default account for imports
ACORE_DB_AUTH_NAME # Auth database name
ACORE_DB_CHARACTERS_NAME # Characters database name
Examples:
# Import all pdumps with environment settings
./import-pdumps.sh
# Import with specific password and account
./import-pdumps.sh --password mypass --account testuser
EOF
}
check_dependencies(){
if ! docker ps >/dev/null 2>&1; then
fatal "Docker is not running or accessible"
fi
if ! docker exec ac-mysql mysql --version >/dev/null 2>&1; then
fatal "MySQL container (ac-mysql) is not running or accessible"
fi
}
parse_config_file(){
local config_file="$1"
local -A config=()
if [[ -f "$config_file" ]]; then
while IFS='=' read -r key value; do
# Skip comments and empty lines
[[ "$key" =~ ^[[:space:]]*# ]] && continue
[[ -z "$key" ]] && continue
# Remove leading/trailing whitespace
key=$(echo "$key" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
value=$(echo "$value" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
config["$key"]="$value"
done < "$config_file"
fi
# Export as variables for the calling function
export CONFIG_ACCOUNT="${config[account]:-}"
export CONFIG_NAME="${config[name]:-}"
export CONFIG_GUID="${config[guid]:-}"
}
prompt_for_account(){
local filename="$1"
if [[ "$INTERACTIVE" != "true" ]]; then
fatal "No account specified for $filename and running in non-interactive mode"
fi
echo ""
warn "No account specified for: $filename"
echo "Available options:"
echo " 1. Provide account name or ID"
echo " 2. Skip this file"
echo ""
while true; do
read -p "Enter account name/ID (or 'skip'): " account_input
case "$account_input" in
skip|Skip|SKIP)
return 1
;;
"")
warn "Please enter an account name/ID or 'skip'"
continue
;;
*)
echo "$account_input"
return 0
;;
esac
done
}
process_pdump_file(){
local pdump_file="$1"
local filename
filename=$(basename "$pdump_file")
local config_file="$IMPORT_DIR/configs/${filename%.*}.conf"
info "Processing: $filename"
# Parse configuration file if it exists
parse_config_file "$config_file"
# Determine account
local target_account="${CONFIG_ACCOUNT:-$DEFAULT_ACCOUNT}"
if [[ -z "$target_account" ]]; then
if ! target_account=$(prompt_for_account "$filename"); then
warn "Skipping $filename (no account provided)"
return 0
fi
fi
# Build command arguments
local cmd_args=(
--file "$pdump_file"
--account "$target_account"
--password "$MYSQL_PW"
--auth-db "$AUTH_DB"
--characters-db "$CHARACTERS_DB"
)
# Add optional parameters if specified in config
[[ -n "$CONFIG_NAME" ]] && cmd_args+=(--name "$CONFIG_NAME")
[[ -n "$CONFIG_GUID" ]] && cmd_args+=(--guid "$CONFIG_GUID")
log "Importing $filename to account $target_account"
[[ -n "$CONFIG_NAME" ]] && log " Character name: $CONFIG_NAME"
[[ -n "$CONFIG_GUID" ]] && log " Forced GUID: $CONFIG_GUID"
# Execute the import
if "./scripts/bash/pdump-import.sh" "${cmd_args[@]}"; then
log "✅ Successfully imported: $filename"
# Move processed file to processed/ subdirectory
local processed_dir="$IMPORT_DIR/processed"
mkdir -p "$processed_dir"
mv "$pdump_file" "$processed_dir/"
[[ -f "$config_file" ]] && mv "$config_file" "$processed_dir/"
else
err "❌ Failed to import: $filename"
return 1
fi
}
# Parse command line arguments
while [[ $# -gt 0 ]]; do
case "$1" in
--password)
[[ $# -ge 2 ]] || fatal "--password requires a value"
MYSQL_PW="$2"
shift 2
;;
--account)
[[ $# -ge 2 ]] || fatal "--account requires a value"
DEFAULT_ACCOUNT="$2"
shift 2
;;
--auth-db)
[[ $# -ge 2 ]] || fatal "--auth-db requires a value"
AUTH_DB="$2"
shift 2
;;
--characters-db)
[[ $# -ge 2 ]] || fatal "--characters-db requires a value"
CHARACTERS_DB="$2"
shift 2
;;
--non-interactive)
INTERACTIVE=false
shift
;;
-h|--help)
usage
exit 0
;;
*)
fatal "Unknown option: $1"
;;
esac
done
# Validate required parameters
[[ -n "$MYSQL_PW" ]] || fatal "MySQL password required (use --password or set MYSQL_ROOT_PASSWORD)"
# Check dependencies
check_dependencies
# Check if import directory exists and has files
if [[ ! -d "$IMPORT_DIR" ]]; then
info "Import directory doesn't exist: $IMPORT_DIR"
info "Create the directory and place your .pdump or .sql files there."
exit 0
fi
# Find pdump files
shopt -s nullglob
pdump_files=("$IMPORT_DIR"/*.pdump "$IMPORT_DIR"/*.sql)
shopt -u nullglob
if [[ ${#pdump_files[@]} -eq 0 ]]; then
info "No pdump files found in $IMPORT_DIR"
info "Place your .pdump or .sql files in this directory to import them."
exit 0
fi
log "Found ${#pdump_files[@]} pdump file(s) to process"
# Create configs directory if it doesn't exist
mkdir -p "$IMPORT_DIR/configs"
# Process each file
processed=0
failed=0
for pdump_file in "${pdump_files[@]}"; do
if process_pdump_file "$pdump_file"; then
((processed++))
else
((failed++))
fi
done
echo ""
log "Import summary:"
log " ✅ Processed: $processed"
[[ $failed -gt 0 ]] && err " ❌ Failed: $failed"
if [[ $processed -gt 0 ]]; then
log ""
log "Character imports completed! Processed files moved to $IMPORT_DIR/processed/"
log "You can now log in and access your imported characters."
fi

View File

@@ -50,9 +50,9 @@ log() {
printf '%b\n' "${GREEN}$*${NC}"
}
# Log warning messages (yellow with warning icon)
# Log warning messages (yellow with warning icon, to stderr for compatibility)
warn() {
printf '%b\n' "${YELLOW}⚠️ $*${NC}"
printf '%b\n' "${YELLOW}⚠️ $*${NC}" >&2
}
# Log error messages (red with error icon, continues execution)
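Sending warn() to stderr keeps warnings out of command substitutions, so callers that capture a function's stdout are not polluted. A small sketch, assuming a hypothetical helper that returns a value on stdout:
get_backup_dir() {
  warn "falling back to default backup directory"   # printed to stderr
  echo "/backups/daily"                              # only this is captured
}
dir=$(get_backup_dir)   # dir is exactly "/backups/daily"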

View File

@@ -0,0 +1,530 @@
#!/bin/bash
#
# Docker utility library for AzerothCore RealmMaster scripts
# This library provides standardized Docker operations, container management,
# and deployment functions.
#
# Usage: source /path/to/scripts/bash/lib/docker-utils.sh
#
# Prevent multiple sourcing
if [ -n "${_DOCKER_UTILS_LIB_LOADED:-}" ]; then
return 0
fi
_DOCKER_UTILS_LIB_LOADED=1
# Source common library for logging functions
DOCKER_UTILS_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
if [ -f "$DOCKER_UTILS_DIR/common.sh" ]; then
source "$DOCKER_UTILS_DIR/common.sh"
elif command -v info >/dev/null 2>&1; then
# Common functions already available
:
else
# Fallback logging functions
info() { printf '\033[0;34m %s\033[0m\n' "$*"; }
warn() { printf '\033[1;33m⚠ %s\033[0m\n' "$*" >&2; }
err() { printf '\033[0;31m❌ %s\033[0m\n' "$*" >&2; }
fatal() { err "$*"; exit 1; }
fi
# =============================================================================
# DOCKER CONTAINER MANAGEMENT
# =============================================================================
# Get container status
# Returns: running, exited, paused, restarting, removing, dead, created, or "not_found"
#
# Usage:
# status=$(docker_get_container_status "ac-mysql")
# if [ "$status" = "running" ]; then
# echo "Container is running"
# fi
#
docker_get_container_status() {
local container_name="$1"
if ! docker ps -a --format "{{.Names}}" | grep -q "^${container_name}$"; then
echo "not_found"
return 1
fi
docker inspect --format='{{.State.Status}}' "$container_name" 2>/dev/null || echo "not_found"
}
# Check if container is running
# Returns 0 if running, 1 if not running or not found
#
# Usage:
# if docker_is_container_running "ac-mysql"; then
# echo "MySQL container is running"
# fi
#
docker_is_container_running() {
local container_name="$1"
local status
status=$(docker_get_container_status "$container_name")
[ "$status" = "running" ]
}
# Wait for container to reach desired state
# Returns 0 if container reaches state within timeout, 1 if timeout
#
# Usage:
# docker_wait_for_container_state "ac-mysql" "running" 30
# docker_wait_for_container_state "ac-mysql" "exited" 10
#
docker_wait_for_container_state() {
local container_name="$1"
local desired_state="$2"
local timeout="${3:-30}"
local check_interval="${4:-2}"
local elapsed=0
info "Waiting for container '$container_name' to reach state '$desired_state' (timeout: ${timeout}s)"
while [ $elapsed -lt $timeout ]; do
local current_state
current_state=$(docker_get_container_status "$container_name")
if [ "$current_state" = "$desired_state" ]; then
info "Container '$container_name' reached desired state: $desired_state"
return 0
fi
sleep "$check_interval"
elapsed=$((elapsed + check_interval))
done
err "Container '$container_name' did not reach state '$desired_state' within ${timeout}s (current: $current_state)"
return 1
}
# Execute command in container with retry logic
# Handles container availability and connection issues
#
# Usage:
# docker_exec_with_retry "ac-mysql" "mysql -uroot -ppassword -e 'SELECT 1'"
# echo "SELECT 1" | docker_exec_with_retry "ac-mysql" "mysql -uroot -ppassword"
#
docker_exec_with_retry() {
local container_name="$1"
local command="$2"
local max_attempts="${3:-3}"
local retry_delay="${4:-2}"
local interactive="${5:-false}"
if ! docker_is_container_running "$container_name"; then
err "Container '$container_name' is not running"
return 1
fi
local attempt=1
while [ $attempt -le $max_attempts ]; do
if [ "$interactive" = "true" ]; then
if docker exec -i "$container_name" sh -c "$command"; then
return 0
fi
else
if docker exec "$container_name" sh -c "$command"; then
return 0
fi
fi
if [ $attempt -lt $max_attempts ]; then
warn "Docker exec failed in '$container_name' (attempt $attempt/$max_attempts), retrying in ${retry_delay}s..."
sleep "$retry_delay"
fi
attempt=$((attempt + 1))
done
err "Docker exec failed in '$container_name' after $max_attempts attempts"
return 1
}
# =============================================================================
# DOCKER COMPOSE PROJECT MANAGEMENT
# =============================================================================
# Get project name from environment or docker-compose.yml
# Returns the Docker Compose project name
#
# Usage:
# project_name=$(docker_get_project_name)
# echo "Project: $project_name"
#
docker_get_project_name() {
# Check environment variable first
if [ -n "${COMPOSE_PROJECT_NAME:-}" ]; then
echo "$COMPOSE_PROJECT_NAME"
return 0
fi
# Check for docker-compose.yml name directive
if [ -f "docker-compose.yml" ] && command -v python3 >/dev/null 2>&1; then
local project_name
project_name=$(python3 -c "
import yaml
try:
with open('docker-compose.yml', 'r') as f:
data = yaml.safe_load(f)
print(data.get('name', ''))
except:
print('')
" 2>/dev/null)
if [ -n "$project_name" ]; then
echo "$project_name"
return 0
fi
fi
# Fallback to directory name
basename "$PWD" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9]//g'
}
# List containers for current project
# Returns list of container names with optional filtering
#
# Usage:
# containers=$(docker_list_project_containers)
# running_containers=$(docker_list_project_containers "running")
#
docker_list_project_containers() {
local status_filter="${1:-}"
local project_name
project_name=$(docker_get_project_name)
local filter_arg=""
if [ -n "$status_filter" ]; then
filter_arg="--filter status=$status_filter"
fi
# Use project label to find containers
docker ps -a $filter_arg --filter "label=com.docker.compose.project=$project_name" --format "{{.Names}}" 2>/dev/null
}
# Stop project containers gracefully
# Stops containers with configurable timeout
#
# Usage:
# docker_stop_project_containers 30 # Stop with 30s timeout
# docker_stop_project_containers # Use default 10s timeout
#
docker_stop_project_containers() {
local timeout="${1:-10}"
local containers
containers=$(docker_list_project_containers "running")
if [ -z "$containers" ]; then
info "No running containers found for project"
return 0
fi
info "Stopping project containers with ${timeout}s timeout: $containers"
echo "$containers" | xargs -r docker stop -t "$timeout"
}
# Start project containers
# Starts containers that are stopped but exist
#
# Usage:
# docker_start_project_containers
#
docker_start_project_containers() {
local containers
containers=$(docker_list_project_containers "exited")
if [ -z "$containers" ]; then
info "No stopped containers found for project"
return 0
fi
info "Starting project containers: $containers"
echo "$containers" | xargs -r docker start
}
# =============================================================================
# DOCKER IMAGE MANAGEMENT
# =============================================================================
# Get image information for container
# Returns image name:tag for specified container
#
# Usage:
# image=$(docker_get_container_image "ac-mysql")
# echo "MySQL container using image: $image"
#
docker_get_container_image() {
local container_name="$1"
# docker inspect works for both running and stopped containers
docker inspect --format='{{.Config.Image}}' "$container_name" 2>/dev/null || echo "unknown"
}
# Check if image exists locally
# Returns 0 if image exists, 1 if not found
#
# Usage:
# if docker_image_exists "mysql:8.0"; then
# echo "MySQL image is available"
# fi
#
docker_image_exists() {
local image_name="$1"
docker images --format "{{.Repository}}:{{.Tag}}" | grep -q "^${image_name}$"
}
# Pull image with retry logic
# Handles temporary network issues and registry problems
#
# Usage:
# docker_pull_image_with_retry "mysql:8.0"
# docker_pull_image_with_retry "azerothcore/ac-wotlk-worldserver:latest" 5 10
#
docker_pull_image_with_retry() {
local image_name="$1"
local max_attempts="${2:-3}"
local retry_delay="${3:-5}"
if docker_image_exists "$image_name"; then
info "Image '$image_name' already exists locally"
return 0
fi
local attempt=1
while [ $attempt -le $max_attempts ]; do
info "Pulling image '$image_name' (attempt $attempt/$max_attempts)"
if docker pull "$image_name"; then
info "Successfully pulled image '$image_name'"
return 0
fi
if [ $attempt -lt $max_attempts ]; then
warn "Failed to pull image '$image_name', retrying in ${retry_delay}s..."
sleep "$retry_delay"
fi
attempt=$((attempt + 1))
done
err "Failed to pull image '$image_name' after $max_attempts attempts"
return 1
}
# =============================================================================
# DOCKER COMPOSE OPERATIONS
# =============================================================================
# Validate docker-compose.yml configuration
# Returns 0 if valid, 1 if invalid or errors found
#
# Usage:
# if docker_compose_validate; then
# echo "Docker Compose configuration is valid"
# fi
#
docker_compose_validate() {
local compose_file="${1:-docker-compose.yml}"
if [ ! -f "$compose_file" ]; then
err "Docker Compose file not found: $compose_file"
return 1
fi
if docker compose -f "$compose_file" config --quiet; then
info "Docker Compose configuration is valid"
return 0
else
err "Docker Compose configuration validation failed"
return 1
fi
}
# Get service status from docker-compose
# Returns service status or "not_found" if service doesn't exist
#
# Usage:
# status=$(docker_compose_get_service_status "ac-mysql")
#
docker_compose_get_service_status() {
local service_name="$1"
local project_name
project_name=$(docker_get_project_name)
# Get container name for the service
local container_name="${project_name}-${service_name}-1"
docker_get_container_status "$container_name"
}
# Deploy with profile and options
# Wrapper around docker compose up with standardized options
#
# Usage:
# docker_compose_deploy "services-standard" "--detach"
# docker_compose_deploy "services-modules" "--no-deps ac-worldserver"
#
docker_compose_deploy() {
local profile="${1:-services-standard}"
local additional_options="${2:-}"
if ! docker_compose_validate; then
err "Cannot deploy: Docker Compose configuration is invalid"
return 1
fi
info "Deploying with profile: $profile"
# Run docker compose up with the requested profile (defaults to detached mode)
if [ -n "$additional_options" ]; then
docker compose --profile "$profile" up $additional_options
else
docker compose --profile "$profile" up --detach
fi
}
# =============================================================================
# DOCKER SYSTEM UTILITIES
# =============================================================================
# Check Docker daemon availability
# Returns 0 if Docker is available, 1 if not
#
# Usage:
# if docker_check_daemon; then
# echo "Docker daemon is available"
# fi
#
docker_check_daemon() {
if docker info >/dev/null 2>&1; then
return 0
else
err "Docker daemon is not available or accessible"
return 1
fi
}
# Get Docker system information
# Returns formatted system info for debugging
#
# Usage:
# docker_print_system_info
#
docker_print_system_info() {
info "Docker System Information:"
if ! docker_check_daemon; then
err "Cannot retrieve Docker system information - daemon not available"
return 1
fi
local docker_version compose_version
docker_version=$(docker --version 2>/dev/null | cut -d' ' -f3 | tr -d ',' || echo "unknown")
compose_version=$(docker compose version --short 2>/dev/null || echo "unknown")
info " Docker Version: $docker_version"
info " Compose Version: $compose_version"
info " Project Name: $(docker_get_project_name)"
local running_containers
running_containers=$(docker_list_project_containers "running" | wc -l)
info " Running Containers: $running_containers"
}
# Cleanup unused Docker resources
# Removes all stopped containers on the host (not just this project), unused networks, and dangling images
#
# Usage:
# docker_cleanup_system true # Include unused volumes
# docker_cleanup_system false # Preserve volumes (default)
#
docker_cleanup_system() {
local include_volumes="${1:-false}"
info "Cleaning up Docker system resources..."
# Remove stopped containers
local stopped_containers
stopped_containers=$(docker ps -aq --filter "status=exited")
if [ -n "$stopped_containers" ]; then
info "Removing stopped containers"
echo "$stopped_containers" | xargs docker rm
fi
# Remove unused networks
info "Removing unused networks"
docker network prune -f
# Remove dangling images
info "Removing dangling images"
docker image prune -f
# Remove unused volumes if requested
if [ "$include_volumes" = "true" ]; then
warn "Removing unused volumes (this may delete data!)"
docker volume prune -f
fi
info "Docker system cleanup completed"
}
# =============================================================================
# CONTAINER HEALTH AND MONITORING
# =============================================================================
# Get container resource usage
# Returns CPU and memory usage statistics
#
# Usage:
# docker_get_container_stats "ac-mysql"
#
docker_get_container_stats() {
local container_name="$1"
if ! docker_is_container_running "$container_name"; then
err "Container '$container_name' is not running"
return 1
fi
docker stats --no-stream --format "table {{.Container}}\t{{.CPUPerc}}\t{{.MemUsage}}\t{{.MemPerc}}" "$container_name"
}
# Check container logs for errors
# Searches recent logs for error patterns
#
# Usage:
# docker_check_container_errors "ac-mysql" 100
#
docker_check_container_errors() {
local container_name="$1"
local lines="${2:-50}"
if ! docker ps -a --format "{{.Names}}" | grep -q "^${container_name}$"; then
err "Container '$container_name' not found"
return 1
fi
info "Checking last $lines log lines for errors in '$container_name'"
# Look for common error patterns
docker logs --tail "$lines" "$container_name" 2>&1 | grep -i "error\|exception\|fail\|fatal" || {
info "No obvious errors found in recent logs"
return 0
}
}
# =============================================================================
# INITIALIZATION
# =============================================================================
# Library loaded successfully
# Scripts can check for $_DOCKER_UTILS_LIB_LOADED to verify library is loaded
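For orientation, a minimal sketch of a deployment script consuming this library (the relative library path and profile name are assumptions; the function names come from the file above):
#!/bin/bash
set -euo pipefail
source "$(dirname "$0")/lib/docker-utils.sh"
docker_check_daemon || exit 1
docker_pull_image_with_retry "mysql:8.0"
docker_compose_deploy "services-standard" "--detach"
docker_wait_for_container_state "ac-mysql" "running" 60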

View File

@@ -0,0 +1,613 @@
#!/bin/bash
#
# Environment and file utility library for AzerothCore RealmMaster scripts
# This library provides enhanced environment variable handling, file operations,
# and path management functions.
#
# Usage: source /path/to/scripts/bash/lib/env-utils.sh
#
# Prevent multiple sourcing
if [ -n "${_ENV_UTILS_LIB_LOADED:-}" ]; then
return 0
fi
_ENV_UTILS_LIB_LOADED=1
# Source common library for logging functions
ENV_UTILS_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
if [ -f "$ENV_UTILS_DIR/common.sh" ]; then
source "$ENV_UTILS_DIR/common.sh"
elif command -v info >/dev/null 2>&1; then
# Common functions already available
:
else
# Fallback logging functions
info() { printf '\033[0;34m %s\033[0m\n' "$*"; }
warn() { printf '\033[1;33m⚠ %s\033[0m\n' "$*" >&2; }
err() { printf '\033[0;31m❌ %s\033[0m\n' "$*" >&2; }
fatal() { err "$*"; exit 1; }
fi
# =============================================================================
# ENVIRONMENT VARIABLE MANAGEMENT
# =============================================================================
# Enhanced read_env function with advanced features
# Supports multiple .env files, environment variable precedence, and validation
#
# Usage:
# value=$(env_read_with_fallback "MYSQL_PASSWORD" "default_password")
# value=$(env_read_with_fallback "PORT" "" ".env.local" "validate_port")
#
env_read_with_fallback() {
local key="$1"
local default="${2:-}"
local env_file="${3:-${ENV_PATH:-${DEFAULT_ENV_PATH:-.env}}}"
local validator_func="${4:-}"
local value=""
# 1. Check if variable is already set in environment (highest precedence)
if [ -n "${!key:-}" ]; then
value="${!key}"
else
# 2. Read from .env file if it exists
if [ -f "$env_file" ]; then
# Extract value using grep and cut, handling various formats
value="$(grep -E "^${key}=" "$env_file" 2>/dev/null | tail -n1 | cut -d'=' -f2- | tr -d '\r')"
# Remove inline comments (everything after # that's not inside quotes)
value="$(echo "$value" | sed 's/[[:space:]]*#.*//' | sed 's/[[:space:]]*$//')"
# Strip quotes if present
if [[ "$value" == \"*\" && "$value" == *\" ]]; then
# Double quotes
value="${value:1:-1}"
elif [[ "$value" == \'*\' && "$value" == *\' ]]; then
# Single quotes
value="${value:1:-1}"
fi
fi
# 3. Use default if still empty
if [ -z "${value:-}" ]; then
value="$default"
fi
fi
# 4. Validate if validator function provided
if [ -n "$validator_func" ] && command -v "$validator_func" >/dev/null 2>&1; then
if ! "$validator_func" "$value"; then
err "Validation failed for $key: $value"
return 1
fi
fi
printf '%s\n' "${value}"
}
# Read environment variable with type conversion
# Supports string, int, bool, and path types
#
# Usage:
# port=$(env_read_typed "MYSQL_PORT" "int" "3306")
# debug=$(env_read_typed "DEBUG" "bool" "false")
# path=$(env_read_typed "DATA_PATH" "path" "/data")
#
env_read_typed() {
local key="$1"
local type="$2"
local default="${3:-}"
local value
value=$(env_read_with_fallback "$key" "$default")
case "$type" in
int|integer)
if ! [[ "$value" =~ ^[0-9]+$ ]]; then
err "Environment variable $key must be an integer: $value"
return 1
fi
echo "$value"
;;
bool|boolean)
case "${value,,}" in
true|yes|1|on|enabled) echo "true" ;;
false|no|0|off|disabled) echo "false" ;;
*) err "Environment variable $key must be boolean: $value"; return 1 ;;
esac
;;
path)
# Expand relative paths to absolute
if [ -n "$value" ]; then
path_resolve_absolute "$value"
fi
;;
string|*)
echo "$value"
;;
esac
}
# Update or add environment variable in .env file with backup
# Creates backup and maintains file integrity
#
# Usage:
# env_update_value "MYSQL_PASSWORD" "new_password"
# env_update_value "DEBUG" "true" ".env.local"
# env_update_value "PORT" "8080" ".env" "true" # create backup
#
env_update_value() {
local key="$1"
local value="$2"
local env_file="${3:-${ENV_PATH:-${DEFAULT_ENV_PATH:-.env}}}"
local create_backup="${4:-false}"
[ -n "$env_file" ] || return 0
# Create backup if requested
if [ "$create_backup" = "true" ] && [ -f "$env_file" ]; then
file_create_backup "$env_file"
fi
# Create file if it doesn't exist
if [ ! -f "$env_file" ]; then
file_ensure_writable_dir "$(dirname "$env_file")"
printf '%s=%s\n' "$key" "$value" >> "$env_file"
return 0
fi
# Update existing or append new
if grep -q "^${key}=" "$env_file"; then
# Rewrite via a temporary file; portable across GNU and BSD sed without -i quirks
local temp_file="${env_file}.tmp.$$"
sed "s|^${key}=.*|${key}=${value}|" "$env_file" > "$temp_file" && mv "$temp_file" "$env_file"
else
printf '\n%s=%s\n' "$key" "$value" >> "$env_file"
fi
info "Updated $key in $env_file"
}
# Load multiple environment files with precedence
# Later files override earlier ones
#
# Usage:
# env_load_multiple ".env" ".env.local" ".env.production"
#
env_load_multiple() {
local files=("$@")
local loaded_count=0
for env_file in "${files[@]}"; do
if [ -f "$env_file" ]; then
info "Loading environment from: $env_file"
set -a
# shellcheck disable=SC1090
source "$env_file"
set +a
loaded_count=$((loaded_count + 1))
fi
done
if [ $loaded_count -eq 0 ]; then
warn "No environment files found: ${files[*]}"
return 1
fi
info "Loaded $loaded_count environment file(s)"
return 0
}
# =============================================================================
# PATH AND FILE UTILITIES
# =============================================================================
# Resolve path to absolute form with proper error handling
# Handles both existing and non-existing paths
#
# Usage:
# abs_path=$(path_resolve_absolute "./relative/path")
# abs_path=$(path_resolve_absolute "/already/absolute")
#
path_resolve_absolute() {
local path="$1"
local base_dir="${2:-$PWD}"
if command -v python3 >/dev/null 2>&1; then
python3 - "$base_dir" "$path" <<'PY'
import os, sys
base, path = sys.argv[1:3]
if not path:
print(os.path.abspath(base))
elif os.path.isabs(path):
print(os.path.normpath(path))
else:
print(os.path.normpath(os.path.join(base, path)))
PY
elif command -v realpath >/dev/null 2>&1; then
if [ "${path:0:1}" = "/" ]; then
echo "$path"
else
realpath -m "$base_dir/$path"
fi
else
# Fallback manual resolution
if [ "${path:0:1}" = "/" ]; then
echo "$path"
else
echo "$base_dir/$path"
fi
fi
}
# Ensure directory exists and is writable with proper permissions
# Creates parent directories if needed
#
# Usage:
# file_ensure_writable_dir "/path/to/directory"
# file_ensure_writable_dir "/path/to/directory" "0755"
#
file_ensure_writable_dir() {
local dir="$1"
local permissions="${2:-0755}"
if [ ! -d "$dir" ]; then
if mkdir -p "$dir" 2>/dev/null; then
info "Created directory: $dir"
chmod "$permissions" "$dir" 2>/dev/null || warn "Could not set permissions on $dir"
else
err "Failed to create directory: $dir"
return 1
fi
fi
if [ ! -w "$dir" ]; then
if chmod u+w "$dir" 2>/dev/null; then
info "Made directory writable: $dir"
else
err "Directory not writable and cannot fix permissions: $dir"
return 1
fi
fi
return 0
}
# Create timestamped backup of file
# Supports custom backup directory and compression
#
# Usage:
# file_create_backup "/path/to/important.conf"
# file_create_backup "/path/to/file" "/backup/dir" "gzip"
#
file_create_backup() {
local file="$1"
local backup_dir="${2:-$(dirname "$file")}"
local compression="${3:-none}"
if [ ! -f "$file" ]; then
warn "File does not exist, skipping backup: $file"
return 0
fi
file_ensure_writable_dir "$backup_dir"
local filename basename backup_file
filename=$(basename "$file")
basename="${filename%.*}"
local extension="${filename##*.}"
# Create backup filename with timestamp
if [ "$filename" = "$basename" ]; then
# No extension
backup_file="${backup_dir}/${filename}.backup.$(date +%Y%m%d_%H%M%S)"
else
# Has extension
backup_file="${backup_dir}/${basename}.backup.$(date +%Y%m%d_%H%M%S).${extension}"
fi
case "$compression" in
gzip|gz)
if gzip -c "$file" > "${backup_file}.gz"; then
info "Created compressed backup: ${backup_file}.gz"
else
err "Failed to create compressed backup: ${backup_file}.gz"
return 1
fi
;;
none|*)
if cp "$file" "$backup_file"; then
info "Created backup: $backup_file"
else
err "Failed to create backup: $backup_file"
return 1
fi
;;
esac
return 0
}
# Set file permissions safely with validation
# Handles both numeric and symbolic modes
#
# Usage:
# file_set_permissions "/path/to/file" "0644"
# file_set_permissions "/path/to/script" "u+x"
#
file_set_permissions() {
local file="$1"
local permissions="$2"
local recursive="${3:-false}"
if [ ! -e "$file" ]; then
err "File or directory does not exist: $file"
return 1
fi
local chmod_opts=""
if [ "$recursive" = "true" ] && [ -d "$file" ]; then
chmod_opts="-R"
fi
if chmod $chmod_opts "$permissions" "$file" 2>/dev/null; then
info "Set permissions $permissions on $file"
return 0
else
err "Failed to set permissions $permissions on $file"
return 1
fi
}
# =============================================================================
# CONFIGURATION FILE UTILITIES
# =============================================================================
# Read value from template file with variable expansion support
# Enhanced version supporting more template formats
#
# Usage:
# value=$(config_read_template_value "MYSQL_PASSWORD" ".env.template")
# value=$(config_read_template_value "PORT" "config.template.yml" "yaml")
#
config_read_template_value() {
local key="$1"
local template_file="${2:-${TEMPLATE_FILE:-${TEMPLATE_PATH:-.env.template}}}"
local format="${3:-env}"
if [ ! -f "$template_file" ]; then
err "Template file not found: $template_file"
return 1
fi
case "$format" in
env)
local raw_line value
raw_line=$(grep "^${key}=" "$template_file" 2>/dev/null | head -1)
if [ -z "$raw_line" ]; then
err "Key '$key' not found in template: $template_file"
return 1
fi
value="${raw_line#*=}"
value=$(echo "$value" | sed 's/^"\(.*\)"$/\1/')
# Handle ${VAR:-default} syntax by extracting the default value
if [[ "$value" =~ ^\$\{[^}]*:-([^}]*)\}$ ]]; then
value="${BASH_REMATCH[1]}"
fi
echo "$value"
;;
yaml|yml)
if command -v python3 >/dev/null 2>&1; then
python3 -c "
import yaml, sys
try:
with open('$template_file', 'r') as f:
data = yaml.safe_load(f)
# Simple key lookup - can be enhanced for nested keys
print(data.get('$key', ''))
except:
sys.exit(1)
" 2>/dev/null
else
err "python3 required for YAML template parsing"
return 1
fi
;;
*)
err "Unsupported template format: $format"
return 1
;;
esac
}
# Validate configuration against schema
# Supports basic validation rules
#
# Usage:
# config_validate_env ".env" "required:MYSQL_PASSWORD,PORT;optional:DEBUG"
#
config_validate_env() {
local env_file="$1"
local rules="${2:-}"
if [ ! -f "$env_file" ]; then
err "Environment file not found: $env_file"
return 1
fi
if [ -z "$rules" ]; then
info "No validation rules specified"
return 0
fi
local validation_failed=false
# Parse validation rules
IFS=';' read -ra rule_sets <<< "$rules"
for rule_set in "${rule_sets[@]}"; do
IFS=':' read -ra rule_parts <<< "$rule_set"
local rule_type="${rule_parts[0]}"
local variables="${rule_parts[1]}"
case "$rule_type" in
required)
IFS=',' read -ra req_vars <<< "$variables"
for var in "${req_vars[@]}"; do
if ! grep -q "^${var}=" "$env_file" || [ -z "$(env_read_with_fallback "$var" "" "$env_file")" ]; then
err "Required environment variable missing or empty: $var"
validation_failed=true
fi
done
;;
optional)
# Optional variables - just log if missing
IFS=',' read -ra opt_vars <<< "$variables"
for var in "${opt_vars[@]}"; do
if ! grep -q "^${var}=" "$env_file"; then
info "Optional environment variable not set: $var"
fi
done
;;
esac
done
if [ "$validation_failed" = "true" ]; then
err "Environment validation failed"
return 1
fi
info "Environment validation passed"
return 0
}
# =============================================================================
# SYSTEM UTILITIES
# =============================================================================
# Detect operating system and distribution
# Returns standardized OS identifier
#
# Usage:
# os=$(system_detect_os)
# if [ "$os" = "ubuntu" ]; then
# echo "Running on Ubuntu"
# fi
#
system_detect_os() {
local os="unknown"
if [ -f /etc/os-release ]; then
# Source os-release for distribution info
local id
id=$(grep '^ID=' /etc/os-release | cut -d'=' -f2 | tr -d '"')
case "$id" in
ubuntu|debian|centos|rhel|fedora|alpine|arch)
os="$id"
;;
*)
os="linux"
;;
esac
elif [[ "$OSTYPE" == "darwin"* ]]; then
os="macos"
elif [[ "$OSTYPE" == "cygwin" || "$OSTYPE" == "msys" ]]; then
os="windows"
fi
echo "$os"
}
# Check system requirements
# Validates required commands and versions
#
# Usage:
# system_check_requirements "docker:20.0,python3:3.6"
#
system_check_requirements() {
local requirements="${1:-}"
if [ -z "$requirements" ]; then
return 0
fi
local check_failed=false
IFS=',' read -ra req_list <<< "$requirements"
for requirement in "${req_list[@]}"; do
IFS=':' read -ra req_parts <<< "$requirement"
local command="${req_parts[0]}"
local min_version="${req_parts[1]:-}"
if ! command -v "$command" >/dev/null 2>&1; then
err "Required command not found: $command"
check_failed=true
continue
fi
if [ -n "$min_version" ]; then
# Basic version checking - can be enhanced
info "Found $command (version checking not fully implemented)"
else
info "Found required command: $command"
fi
done
if [ "$check_failed" = "true" ]; then
err "System requirements check failed"
return 1
fi
info "System requirements check passed"
return 0
}
# =============================================================================
# INITIALIZATION AND VALIDATION
# =============================================================================
# Validate environment utility configuration
# Checks that utilities are working correctly
#
# Usage:
# env_utils_validate
#
env_utils_validate() {
info "Validating environment utilities..."
# Test path resolution
local test_path
test_path=$(path_resolve_absolute "." 2>/dev/null)
if [ -z "$test_path" ]; then
err "Path resolution utility not working"
return 1
fi
# Test directory operations
if ! file_ensure_writable_dir "/tmp/env-utils-test.$$"; then
err "Directory utility not working"
return 1
fi
rmdir "/tmp/env-utils-test.$$" 2>/dev/null || true
info "Environment utilities validation successful"
return 0
}
# =============================================================================
# INITIALIZATION
# =============================================================================
# Library loaded successfully
# Scripts can check for $_ENV_UTILS_LIB_LOADED to verify library is loaded
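A brief usage sketch for this library (keys and values are illustrative; the functions are defined above):
source "$(dirname "$0")/lib/env-utils.sh"
mysql_port=$(env_read_typed "MYSQL_PORT" "int" "3306")
debug=$(env_read_typed "DEBUG" "bool" "false")
env_update_value "DEBUG" "true" ".env" "true"   # update with a timestamped backup
config_validate_env ".env" "required:MYSQL_ROOT_PASSWORD;optional:DEBUG"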

View File

@@ -0,0 +1,376 @@
#!/bin/bash
#
# MySQL utility library for AzerothCore RealmMaster scripts
# This library provides standardized MySQL operations, connection management,
# and database interaction functions.
#
# Usage: source /path/to/scripts/bash/lib/mysql-utils.sh
#
# Prevent multiple sourcing
if [ -n "${_MYSQL_UTILS_LIB_LOADED:-}" ]; then
return 0
fi
_MYSQL_UTILS_LIB_LOADED=1
# Source common library for logging functions
MYSQL_UTILS_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
if [ -f "$MYSQL_UTILS_DIR/common.sh" ]; then
source "$MYSQL_UTILS_DIR/common.sh"
elif command -v info >/dev/null 2>&1; then
# Common functions already available
:
else
# Fallback logging functions
info() { printf '\033[0;34m %s\033[0m\n' "$*"; }
warn() { printf '\033[1;33m⚠ %s\033[0m\n' "$*" >&2; }
err() { printf '\033[0;31m❌ %s\033[0m\n' "$*" >&2; }
fatal() { err "$*"; exit 1; }
fi
# =============================================================================
# MYSQL CONNECTION CONFIGURATION
# =============================================================================
# Default MySQL configuration - can be overridden by environment
MYSQL_HOST="${MYSQL_HOST:-${CONTAINER_MYSQL:-ac-mysql}}"
MYSQL_PORT="${MYSQL_PORT:-3306}"
MYSQL_USER="${MYSQL_USER:-root}"
MYSQL_ROOT_PASSWORD="${MYSQL_ROOT_PASSWORD:-${MYSQL_PW:-azerothcore}}"
MYSQL_CONTAINER="${MYSQL_CONTAINER:-ac-mysql}"
# =============================================================================
# MYSQL CONNECTION FUNCTIONS
# =============================================================================
# Test MySQL connection with current configuration
# Returns 0 if connection successful, 1 if failed
#
# Usage:
# if mysql_test_connection; then
# echo "MySQL is available"
# fi
#
mysql_test_connection() {
local host="${1:-$MYSQL_HOST}"
local port="${2:-$MYSQL_PORT}"
local user="${3:-$MYSQL_USER}"
local password="${4:-$MYSQL_ROOT_PASSWORD}"
MYSQL_PWD="$password" mysql -h "$host" -P "$port" -u "$user" -e "SELECT 1" >/dev/null 2>&1
}
# Wait for MySQL to be ready with timeout
# Returns 0 if MySQL becomes available within timeout, 1 if timeout reached
#
# Usage:
# mysql_wait_for_connection 60 # Wait up to 60 seconds
# mysql_wait_for_connection # Use default 30 second timeout
#
mysql_wait_for_connection() {
local timeout="${1:-30}"
local retry_interval="${2:-2}"
local elapsed=0
info "Waiting for MySQL connection (${MYSQL_HOST}:${MYSQL_PORT}) with ${timeout}s timeout..."
while [ $elapsed -lt $timeout ]; do
if mysql_test_connection; then
info "MySQL connection established"
return 0
fi
sleep "$retry_interval"
elapsed=$((elapsed + retry_interval))
done
err "MySQL connection failed after ${timeout}s timeout"
return 1
}
# Execute MySQL command with retry logic
# Handles both direct queries and piped input
#
# Usage:
# mysql_exec_with_retry "database_name" "SELECT COUNT(*) FROM table;"
# echo "SELECT 1;" | mysql_exec_with_retry "database_name"
# mysql_exec_with_retry "database_name" < script.sql
#
mysql_exec_with_retry() {
local database="$1"
local query="${2:-}"
local max_attempts="${3:-3}"
local retry_delay="${4:-2}"
local attempt=1
while [ $attempt -le $max_attempts ]; do
if [ -n "$query" ]; then
# Direct query execution
if MYSQL_PWD="$MYSQL_ROOT_PASSWORD" mysql -h "$MYSQL_HOST" -P "$MYSQL_PORT" -u "$MYSQL_USER" "$database" -e "$query"; then
return 0
fi
else
# Input from pipe/stdin
if MYSQL_PWD="$MYSQL_ROOT_PASSWORD" mysql -h "$MYSQL_HOST" -P "$MYSQL_PORT" -u "$MYSQL_USER" "$database"; then
return 0
fi
fi
if [ $attempt -lt $max_attempts ]; then
warn "MySQL query failed (attempt $attempt/$max_attempts), retrying in ${retry_delay}s..."
sleep "$retry_delay"
fi
attempt=$((attempt + 1))
done
err "MySQL query failed after $max_attempts attempts"
return 1
}
# Execute MySQL query and return result (no table headers)
# Optimized for single values and parsing
#
# Usage:
# count=$(mysql_query "acore_characters" "SELECT COUNT(*) FROM characters")
# tables=$(mysql_query "information_schema" "SHOW TABLES")
#
mysql_query() {
local database="$1"
local query="$2"
local host="${3:-$MYSQL_HOST}"
local port="${4:-$MYSQL_PORT}"
local user="${5:-$MYSQL_USER}"
local password="${6:-$MYSQL_ROOT_PASSWORD}"
MYSQL_PWD="$password" mysql -h "$host" -P "$port" -u "$user" -N -B "$database" -e "$query" 2>/dev/null
}
# =============================================================================
# DOCKER MYSQL FUNCTIONS
# =============================================================================
# Execute MySQL command inside Docker container
# Wrapper around docker exec with standardized MySQL connection
#
# Usage:
# docker_mysql_exec "acore_auth" "SELECT COUNT(*) FROM account;"
# echo "SELECT 1;" | docker_mysql_exec "acore_auth"
#
docker_mysql_exec() {
local database="$1"
local query="${2:-}"
local container="${3:-$MYSQL_CONTAINER}"
local password="${4:-$MYSQL_ROOT_PASSWORD}"
if [ -n "$query" ]; then
docker exec "$container" mysql -uroot -p"$password" "$database" -e "$query"
else
docker exec -i "$container" mysql -uroot -p"$password" "$database"
fi
}
# Execute MySQL query in Docker container (no table headers)
# Optimized for single values and parsing
#
# Usage:
# count=$(docker_mysql_query "acore_characters" "SELECT COUNT(*) FROM characters")
#
docker_mysql_query() {
local database="$1"
local query="$2"
local container="${3:-$MYSQL_CONTAINER}"
local password="${4:-$MYSQL_ROOT_PASSWORD}"
docker exec "$container" mysql -uroot -p"$password" -N -B "$database" -e "$query" 2>/dev/null
}
# Check if MySQL container is healthy and accepting connections
#
# Usage:
# if docker_mysql_is_ready; then
# echo "MySQL container is ready"
# fi
#
docker_mysql_is_ready() {
local container="${1:-$MYSQL_CONTAINER}"
local password="${2:-$MYSQL_ROOT_PASSWORD}"
docker exec "$container" mysqladmin ping -uroot -p"$password" >/dev/null 2>&1
}
# =============================================================================
# DATABASE UTILITY FUNCTIONS
# =============================================================================
# Check if database exists
# Returns 0 if database exists, 1 if not found
#
# Usage:
# if mysql_database_exists "acore_world"; then
# echo "World database found"
# fi
#
mysql_database_exists() {
local database_name="$1"
local result
result=$(mysql_query "information_schema" "SELECT COUNT(*) FROM SCHEMATA WHERE SCHEMA_NAME='$database_name'" 2>/dev/null || echo "0")
[ "$result" -gt 0 ] 2>/dev/null
}
# Get table count for database(s)
# Supports both single database and multiple database patterns
#
# Usage:
# count=$(mysql_get_table_count "acore_world")
# count=$(mysql_get_table_count "acore_auth,acore_characters")
#
mysql_get_table_count() {
local databases="$1"
local schema_list
# Convert comma-separated list to SQL IN clause format
schema_list=$(echo "$databases" | sed "s/,/','/g" | sed "s/^/'/" | sed "s/$/'/")
mysql_query "information_schema" "SELECT COUNT(*) FROM tables WHERE table_schema IN ($schema_list)"
}
# Get database connection string for applications
# Returns connection string in format: host;port;user;password;database
#
# Usage:
# conn_str=$(mysql_get_connection_string "acore_auth")
#
mysql_get_connection_string() {
local database="$1"
local host="${2:-$MYSQL_HOST}"
local port="${3:-$MYSQL_PORT}"
local user="${4:-$MYSQL_USER}"
local password="${5:-$MYSQL_ROOT_PASSWORD}"
printf '%s;%s;%s;%s;%s\n' "$host" "$port" "$user" "$password" "$database"
}
# =============================================================================
# BACKUP AND RESTORE UTILITIES
# =============================================================================
# Create database backup using mysqldump
# Supports both compressed and uncompressed output
#
# Usage:
# mysql_backup_database "acore_characters" "/path/to/backup.sql"
# mysql_backup_database "acore_world" "/path/to/backup.sql.gz" "gzip"
#
mysql_backup_database() {
local database="$1"
local output_file="$2"
local compression="${3:-none}"
local container="${4:-$MYSQL_CONTAINER}"
local password="${5:-$MYSQL_ROOT_PASSWORD}"
info "Creating backup of $database -> $output_file"
case "$compression" in
gzip|gz)
docker exec "$container" mysqldump -uroot -p"$password" "$database" | gzip > "$output_file"
;;
none|*)
docker exec "$container" mysqldump -uroot -p"$password" "$database" > "$output_file"
;;
esac
}
# Restore database from backup file
# Handles both compressed and uncompressed files automatically
#
# Usage:
# mysql_restore_database "acore_characters" "/path/to/backup.sql"
# mysql_restore_database "acore_world" "/path/to/backup.sql.gz"
#
mysql_restore_database() {
local database="$1"
local backup_file="$2"
local container="${3:-$MYSQL_CONTAINER}"
local password="${4:-$MYSQL_ROOT_PASSWORD}"
if [ ! -f "$backup_file" ]; then
err "Backup file not found: $backup_file"
return 1
fi
info "Restoring $database from $backup_file"
case "$backup_file" in
*.gz)
gzip -dc "$backup_file" | docker exec -i "$container" mysql -uroot -p"$password" "$database"
;;
*.sql)
docker exec -i "$container" mysql -uroot -p"$password" "$database" < "$backup_file"
;;
*)
warn "Unknown backup file format, treating as uncompressed SQL"
docker exec -i "$container" mysql -uroot -p"$password" "$database" < "$backup_file"
;;
esac
}
# =============================================================================
# VALIDATION AND DIAGNOSTICS
# =============================================================================
# Validate MySQL configuration and connectivity
# Comprehensive health check for MySQL setup
#
# Usage:
# mysql_validate_configuration
#
mysql_validate_configuration() {
info "Validating MySQL configuration..."
# Check required environment variables
if [ -z "$MYSQL_ROOT_PASSWORD" ]; then
err "MYSQL_ROOT_PASSWORD is not set"
return 1
fi
# Test basic connectivity
if ! mysql_test_connection; then
err "Cannot connect to MySQL at ${MYSQL_HOST}:${MYSQL_PORT}"
return 1
fi
# Check Docker container if using container setup
if docker ps --format '{{.Names}}' | grep -qx "$MYSQL_CONTAINER"; then
if ! docker_mysql_is_ready; then
err "MySQL container $MYSQL_CONTAINER is not ready"
return 1
fi
info "MySQL container $MYSQL_CONTAINER is healthy"
fi
info "MySQL configuration validation successful"
return 0
}
# Print MySQL configuration summary
# Useful for debugging and verification
#
# Usage:
# mysql_print_configuration
#
mysql_print_configuration() {
info "MySQL Configuration Summary:"
info " Host: $MYSQL_HOST"
info " Port: $MYSQL_PORT"
info " User: $MYSQL_USER"
info " Container: $MYSQL_CONTAINER"
info " Password: $([ -n "$MYSQL_ROOT_PASSWORD" ] && echo "***SET***" || echo "***NOT SET***")"
}
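# Illustrative sketch (not part of the original library; the function name is a
# placeholder): a pre-flight gate a deployment script could run before touching
# any database.
mysql_example_preflight() {
  if ! mysql_validate_configuration; then
    err "Aborting: MySQL is not reachable or misconfigured"
    return 1
  fi
  mysql_print_configuration
}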
# =============================================================================
# INITIALIZATION
# =============================================================================
# Mark the library as loaded so callers can verify it was sourced successfully
# Scripts can check for $_MYSQL_UTILS_LIB_LOADED to verify the library is loaded
_MYSQL_UTILS_LIB_LOADED=1

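# Usage sketch (not part of the original file; the relative path is an assumption
# based on the scripts/bash/lib layout used elsewhere in this repository):
#
#   SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
#   source "$SCRIPT_DIR/lib/mysql-utils.sh"
#   [ -n "${_MYSQL_UTILS_LIB_LOADED:-}" ] || { echo "mysql-utils.sh failed to load" >&2; exit 1; }
#   mysql_validate_configuration || exit 1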
View File

@@ -141,10 +141,6 @@ run_post_install_hooks(){
export MODULES_ROOT="${MODULES_ROOT:-/modules}"
export LUA_SCRIPTS_TARGET="/azerothcore/lua_scripts"
# Pass build environment variables to hooks
export STACK_SOURCE_VARIANT="${STACK_SOURCE_VARIANT:-}"
export MODULES_REBUILD_SOURCE_PATH="${MODULES_REBUILD_SOURCE_PATH:-}"
# Execute the hook script
if "$hook_script"; then
ok "Hook '$hook' completed successfully"
@@ -178,18 +174,7 @@ install_enabled_modules(){
continue
fi
if [ -d "$dir/.git" ]; then
info "$dir already present; checking for updates"
(cd "$dir" && git fetch origin >/dev/null 2>&1 || warn "Failed to fetch updates for $dir")
local current_branch
current_branch=$(cd "$dir" && git rev-parse --abbrev-ref HEAD 2>/dev/null || echo "master")
if (cd "$dir" && git pull origin "$current_branch" 2>&1 | grep -q "Already up to date"); then
info "$dir is already up to date"
else
ok "$dir updated from remote"
fi
if [ -n "$ref" ]; then
(cd "$dir" && git checkout "$ref") || warn "Unable to checkout ref $ref for $dir"
fi
info "$dir already present; skipping clone"
elif [ -d "$dir" ]; then
warn "$dir exists but is not a git repository; leaving in place"
else
@@ -482,7 +467,6 @@ load_sql_helper(){
# Module SQL is now staged at runtime by stage-modules.sh which copies files to
# /azerothcore/data/sql/updates/ (core directory) where they ARE scanned and processed.
track_module_state(){
echo 'Checking for module changes that require rebuild...'

View File

@@ -1,344 +0,0 @@
#!/bin/bash
# Import character pdump files into AzerothCore database
set -euo pipefail
INVOCATION_DIR="$PWD"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"
COLOR_RED='\033[0;31m'
COLOR_GREEN='\033[0;32m'
COLOR_YELLOW='\033[1;33m'
COLOR_BLUE='\033[0;34m'
COLOR_RESET='\033[0m'
log(){ printf '%b\n' "${COLOR_GREEN}$*${COLOR_RESET}"; }
warn(){ printf '%b\n' "${COLOR_YELLOW}$*${COLOR_RESET}"; }
err(){ printf '%b\n' "${COLOR_RED}$*${COLOR_RESET}"; }
info(){ printf '%b\n' "${COLOR_BLUE}$*${COLOR_RESET}"; }
fatal(){ err "$*"; exit 1; }
MYSQL_PW=""
PDUMP_FILE=""
TARGET_ACCOUNT=""
NEW_CHARACTER_NAME=""
FORCE_GUID=""
AUTH_DB="acore_auth"
CHARACTERS_DB="acore_characters"
DRY_RUN=false
BACKUP_BEFORE=true
usage(){
cat <<'EOF'
Usage: ./pdump-import.sh [options]
Import character pdump files into AzerothCore database.
Required Options:
-f, --file FILE Pdump file to import (.pdump or .sql format)
-a, --account ACCOUNT Target account name or ID for character import
-p, --password PASS MySQL root password
Optional:
-n, --name NAME New character name (if different from dump)
-g, --guid GUID Force specific character GUID
--auth-db NAME Auth database schema name (default: acore_auth)
--characters-db NAME Characters database schema name (default: acore_characters)
--dry-run Validate pdump without importing
--no-backup Skip pre-import backup (not recommended)
-h, --help Show this help and exit
Examples:
# Import character from pdump file
./pdump-import.sh --file character.pdump --account testaccount --password azerothcore123
# Import with new character name
./pdump-import.sh --file oldchar.pdump --account newaccount --name "NewCharName" --password azerothcore123
# Validate pdump file without importing
./pdump-import.sh --file character.pdump --account testaccount --password azerothcore123 --dry-run
Notes:
- Account must exist in the auth database before import
- Character names must be unique across the server
- Pre-import backup is created automatically (can be disabled with --no-backup)
- Use --dry-run to validate pdump structure before actual import
EOF
}
validate_account(){
local account="$1"
if [[ "$account" =~ ^[0-9]+$ ]]; then
# Account ID provided
local count
count=$(docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -N -B -e \
"SELECT COUNT(*) FROM ${AUTH_DB}.account WHERE id = $account;")
[[ "$count" -eq 1 ]] || fatal "Account ID $account not found in auth database"
else
# Account name provided
local count
count=$(docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -N -B -e \
"SELECT COUNT(*) FROM ${AUTH_DB}.account WHERE username = '$account';")
[[ "$count" -eq 1 ]] || fatal "Account '$account' not found in auth database"
fi
}
get_account_id(){
local account="$1"
if [[ "$account" =~ ^[0-9]+$ ]]; then
echo "$account"
else
docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -N -B -e \
"SELECT id FROM ${AUTH_DB}.account WHERE username = '$account';"
fi
}
validate_character_name(){
local name="$1"
# Check character name format (WoW naming rules)
if [[ ! "$name" =~ ^[A-Za-z]{2,12}$ ]]; then
fatal "Invalid character name: '$name'. Must be 2-12 letters, no numbers or special characters."
fi
# Check if character name already exists
local count
count=$(docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -N -B -e \
"SELECT COUNT(*) FROM ${CHARACTERS_DB}.characters WHERE name = '$name';")
[[ "$count" -eq 0 ]] || fatal "Character name '$name' already exists in database"
}
get_next_guid(){
docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -N -B -e \
"SELECT COALESCE(MAX(guid), 0) + 1 FROM ${CHARACTERS_DB}.characters;"
}
validate_pdump_format(){
local file="$1"
if [[ ! -f "$file" ]]; then
fatal "Pdump file not found: $file"
fi
# Check if file is readable and has SQL-like content
if ! head -10 "$file" | grep -q -i "INSERT\|UPDATE\|CREATE\|ALTER"; then
warn "File does not appear to contain SQL statements. Continuing anyway..."
fi
info "Pdump file validation: OK"
}
backup_characters(){
local timestamp
timestamp=$(date +%Y%m%d_%H%M%S)
local backup_file="manual-backups/characters-pre-pdump-import-${timestamp}.sql"
mkdir -p manual-backups
log "Creating backup: $backup_file"
docker exec ac-mysql mysqldump -uroot -p"$MYSQL_PW" "$CHARACTERS_DB" > "$backup_file"
echo "$backup_file"
}
process_pdump_sql(){
local file="$1"
local account_id="$2"
local new_guid="${3:-}"
local new_name="${4:-}"
# Create temporary processed file
local temp_file
temp_file=$(mktemp)
# Process the pdump SQL file
# Replace account references and optionally GUID/name
if [[ -n "$new_guid" && -n "$new_name" ]]; then
sed -e "s/\([^0-9]\)[0-9]\+\([^0-9].*account.*=\)/\1${account_id}\2/g" \
-e "s/\([^0-9]\)[0-9]\+\([^0-9].*guid.*=\)/\1${new_guid}\2/g" \
-e "s/'[^']*'\([^']*name.*=\)/'${new_name}'\1/g" \
"$file" > "$temp_file"
elif [[ -n "$new_guid" ]]; then
sed -e "s/\([^0-9]\)[0-9]\+\([^0-9].*account.*=\)/\1${account_id}\2/g" \
-e "s/\([^0-9]\)[0-9]\+\([^0-9].*guid.*=\)/\1${new_guid}\2/g" \
"$file" > "$temp_file"
elif [[ -n "$new_name" ]]; then
sed -e "s/\([^0-9]\)[0-9]\+\([^0-9].*account.*=\)/\1${account_id}\2/g" \
-e "s/'[^']*'\([^']*name.*=\)/'${new_name}'\1/g" \
"$file" > "$temp_file"
else
sed -e "s/\([^0-9]\)[0-9]\+\([^0-9].*account.*=\)/\1${account_id}\2/g" \
"$file" > "$temp_file"
fi
echo "$temp_file"
}
import_pdump(){
local processed_file="$1"
log "Importing character data into $CHARACTERS_DB database"
if docker exec -i ac-mysql mysql -uroot -p"$MYSQL_PW" "$CHARACTERS_DB" < "$processed_file"; then
log "Character import completed successfully"
else
fatal "Character import failed. Check MySQL logs for details."
fi
}
case "${1:-}" in
-h|--help) usage; exit 0;;
esac
# Parse command line arguments
POSITIONAL=()
while [[ $# -gt 0 ]]; do
case "$1" in
-f|--file)
[[ $# -ge 2 ]] || fatal "--file requires a file path"
PDUMP_FILE="$2"
shift 2
;;
-a|--account)
[[ $# -ge 2 ]] || fatal "--account requires an account name or ID"
TARGET_ACCOUNT="$2"
shift 2
;;
-p|--password)
[[ $# -ge 2 ]] || fatal "--password requires a value"
MYSQL_PW="$2"
shift 2
;;
-n|--name)
[[ $# -ge 2 ]] || fatal "--name requires a character name"
NEW_CHARACTER_NAME="$2"
shift 2
;;
-g|--guid)
[[ $# -ge 2 ]] || fatal "--guid requires a GUID number"
FORCE_GUID="$2"
shift 2
;;
--auth-db)
[[ $# -ge 2 ]] || fatal "--auth-db requires a value"
AUTH_DB="$2"
shift 2
;;
--characters-db)
[[ $# -ge 2 ]] || fatal "--characters-db requires a value"
CHARACTERS_DB="$2"
shift 2
;;
--dry-run)
DRY_RUN=true
shift
;;
--no-backup)
BACKUP_BEFORE=false
shift
;;
-h|--help)
usage
exit 0
;;
--)
shift
while [[ $# -gt 0 ]]; do
POSITIONAL+=("$1")
shift
done
break
;;
-*)
fatal "Unknown option: $1"
;;
*)
POSITIONAL+=("$1")
shift
;;
esac
done
# Validate required arguments
[[ -n "$PDUMP_FILE" ]] || fatal "Pdump file is required. Use --file FILE"
[[ -n "$TARGET_ACCOUNT" ]] || fatal "Target account is required. Use --account ACCOUNT"
[[ -n "$MYSQL_PW" ]] || fatal "MySQL password is required. Use --password PASS"
# Resolve relative paths
if [[ ! "$PDUMP_FILE" =~ ^/ ]]; then
PDUMP_FILE="$INVOCATION_DIR/$PDUMP_FILE"
fi
# Validate inputs
log "Validating pdump file..."
validate_pdump_format "$PDUMP_FILE"
log "Validating target account..."
validate_account "$TARGET_ACCOUNT"
ACCOUNT_ID=$(get_account_id "$TARGET_ACCOUNT")
log "Target account ID: $ACCOUNT_ID"
if [[ -n "$NEW_CHARACTER_NAME" ]]; then
log "Validating new character name..."
validate_character_name "$NEW_CHARACTER_NAME"
fi
# Determine GUID
if [[ -n "$FORCE_GUID" ]]; then
CHARACTER_GUID="$FORCE_GUID"
log "Using forced GUID: $CHARACTER_GUID"
else
CHARACTER_GUID=$(get_next_guid)
log "Using next available GUID: $CHARACTER_GUID"
fi
# Process pdump file
log "Processing pdump file..."
PROCESSED_FILE=$(process_pdump_sql "$PDUMP_FILE" "$ACCOUNT_ID" "$CHARACTER_GUID" "$NEW_CHARACTER_NAME")
if $DRY_RUN; then
info "DRY RUN: Pdump processing completed successfully"
info "Processed file saved to: $PROCESSED_FILE"
info "Account ID: $ACCOUNT_ID"
info "Character GUID: $CHARACTER_GUID"
[[ -n "$NEW_CHARACTER_NAME" ]] && info "Character name: $NEW_CHARACTER_NAME"
info "Run without --dry-run to perform actual import"
rm -f "$PROCESSED_FILE"
exit 0
fi
# Create backup before import
BACKUP_FILE=""
if $BACKUP_BEFORE; then
BACKUP_FILE=$(backup_characters)
fi
# Stop world server to prevent issues during import
log "Stopping world server for safe import..."
docker stop ac-worldserver >/dev/null 2>&1 || warn "World server was not running"
# Perform import
trap 'rm -f "$PROCESSED_FILE"' EXIT
import_pdump "$PROCESSED_FILE"
# Restart world server
log "Restarting world server..."
docker start ac-worldserver >/dev/null 2>&1
# Wait for server to initialize
log "Waiting for world server to initialize..."
for i in {1..30}; do
if docker exec ac-worldserver pgrep worldserver >/dev/null 2>&1; then
log "World server is running"
break
fi
if [ $i -eq 30 ]; then
warn "World server took longer than expected to start"
fi
sleep 2
done
# Verify import
CHARACTER_COUNT=$(docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -N -B -e \
"SELECT COUNT(*) FROM ${CHARACTERS_DB}.characters WHERE account = $ACCOUNT_ID;")
log "Import completed successfully!"
log "Characters on account $TARGET_ACCOUNT: $CHARACTER_COUNT"
[[ -n "$BACKUP_FILE" ]] && log "Backup created: $BACKUP_FILE"
info "Character import from pdump completed. You can now log in and play!"

View File

@@ -3,8 +3,21 @@
# to re-copy SQL files.
set -euo pipefail
info(){ echo "🔧 [restore-stage] $*"; }
warn(){ echo "⚠️ [restore-stage] $*" >&2; }
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Source common library for standardized logging
if ! source "$SCRIPT_DIR/lib/common.sh" 2>/dev/null; then
echo "❌ FATAL: Cannot load $SCRIPT_DIR/lib/common.sh" >&2
exit 1
fi
# Preserve the library logging functions before overriding their names,
# otherwise the prefixed wrappers below would recurse into themselves
eval "lib_$(declare -f info)"
eval "lib_$(declare -f warn)"
# Specialized prefixed logging for this restoration context
restore_info() { lib_info "🔧 [restore-stage] $*"; }
restore_warn() { lib_warn "[restore-stage] $*"; }
# Maintain compatibility with existing function calls
info() { restore_info "$*"; }
warn() { restore_warn "$*"; }
MODULES_DIR="${MODULES_DIR:-/modules}"
MODULES_META_DIR="${MODULES_DIR}/.modules-meta"

View File

@@ -4,7 +4,6 @@ import os
import re
import socket
import subprocess
import sys
import time
from pathlib import Path
@@ -469,14 +468,8 @@ def docker_stats():
def main():
env = load_env()
project = read_env(env, "COMPOSE_PROJECT_NAME")
if not project:
print(json.dumps({"error": "COMPOSE_PROJECT_NAME not set in environment"}), file=sys.stderr)
sys.exit(1)
network = read_env(env, "NETWORK_NAME")
if not network:
print(json.dumps({"error": "NETWORK_NAME not set in environment"}), file=sys.stderr)
sys.exit(1)
project = read_env(env, "COMPOSE_PROJECT_NAME", "acore-compose")
network = read_env(env, "NETWORK_NAME", "azerothcore")
services = [
("ac-mysql", "MySQL"),

View File

@@ -1,65 +0,0 @@
#!/usr/bin/env python3
"""
Test TOTP token generation for AzerothCore 2FA
"""
import base64
import hmac
import hashlib
import struct
import time
import argparse
def generate_totp(secret, timestamp=None, interval=30):
"""Generate TOTP token from Base32 secret"""
if timestamp is None:
timestamp = int(time.time())
# Calculate time counter
counter = timestamp // interval
# Decode Base32 secret
# Add padding if needed
secret = secret.upper()
missing_padding = len(secret) % 8
if missing_padding:
secret += '=' * (8 - missing_padding)
key = base64.b32decode(secret)
# Pack counter as big-endian 8-byte integer
counter_bytes = struct.pack('>Q', counter)
# Generate HMAC-SHA1 hash
hmac_hash = hmac.new(key, counter_bytes, hashlib.sha1).digest()
# Dynamic truncation
offset = hmac_hash[-1] & 0xf
code = struct.unpack('>I', hmac_hash[offset:offset + 4])[0]
code &= 0x7fffffff
code %= 1000000
return f"{code:06d}"
def main():
parser = argparse.ArgumentParser(description="Generate TOTP tokens for testing")
parser.add_argument('-s', '--secret', required=True, help='Base32 secret')
parser.add_argument('-t', '--time', type=int, help='Unix timestamp (default: current time)')
parser.add_argument('-c', '--count', type=int, default=1, help='Number of tokens to generate')
args = parser.parse_args()
timestamp = args.time or int(time.time())
print(f"Secret: {args.secret}")
print(f"Timestamp: {timestamp} ({time.ctime(timestamp)})")
print(f"Interval: 30 seconds")
print()
for i in range(args.count):
current_time = timestamp + (i * 30)
token = generate_totp(args.secret, current_time)
print(f"Time: {time.ctime(current_time)} | Token: {token}")
if __name__ == "__main__":
main()

View File

@@ -1,301 +0,0 @@
#!/bin/bash
# Validate environment configuration for AzerothCore RealmMaster
# Usage: ./scripts/bash/validate-env.sh [--strict] [--quiet]
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
ENV_FILE="$PROJECT_ROOT/.env"
TEMPLATE_FILE="$PROJECT_ROOT/.env.template"
# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
# Flags
STRICT_MODE=false
QUIET_MODE=false
EXIT_CODE=0
# Parse arguments
while [[ $# -gt 0 ]]; do
case "$1" in
--strict)
STRICT_MODE=true
shift
;;
--quiet)
QUIET_MODE=true
shift
;;
-h|--help)
cat <<EOF
Usage: $0 [OPTIONS]
Validates environment configuration for required variables.
OPTIONS:
--strict Fail on missing optional variables
--quiet Only show errors, suppress info/success messages
-h, --help Show this help
EXIT CODES:
0 - All required variables present
1 - Missing required variables
2 - Missing optional variables (only in --strict mode)
REQUIRED VARIABLES:
Project Configuration:
COMPOSE_PROJECT_NAME - Project name for containers/images
NETWORK_NAME - Docker network name
Repository Configuration:
ACORE_REPO_STANDARD - Standard AzerothCore repository URL
ACORE_BRANCH_STANDARD - Standard AzerothCore branch name
ACORE_REPO_PLAYERBOTS - Playerbots repository URL
ACORE_BRANCH_PLAYERBOTS - Playerbots branch name
Storage Paths:
STORAGE_PATH - Main storage path
STORAGE_PATH_LOCAL - Local storage path
Database Configuration:
MYSQL_ROOT_PASSWORD - MySQL root password
MYSQL_USER - MySQL user (typically root)
MYSQL_PORT - MySQL port (typically 3306)
MYSQL_HOST - MySQL hostname
DB_AUTH_NAME - Auth database name
DB_WORLD_NAME - World database name
DB_CHARACTERS_NAME - Characters database name
DB_PLAYERBOTS_NAME - Playerbots database name
Container Configuration:
CONTAINER_MYSQL - MySQL container name
CONTAINER_USER - Container user (format: uid:gid)
OPTIONAL VARIABLES (checked with --strict):
MySQL Performance:
MYSQL_INNODB_BUFFER_POOL_SIZE - InnoDB buffer pool size
MYSQL_INNODB_LOG_FILE_SIZE - InnoDB log file size
MYSQL_INNODB_REDO_LOG_CAPACITY - InnoDB redo log capacity
Database Connection:
DB_RECONNECT_SECONDS - Database reconnection delay
DB_RECONNECT_ATTEMPTS - Database reconnection attempts
Build Configuration:
MODULES_REBUILD_SOURCE_PATH - Path to source for module builds
Backup Configuration:
BACKUP_PATH - Backup storage path
BACKUP_RETENTION_DAYS - Daily backup retention
BACKUP_RETENTION_HOURS - Hourly backup retention
Image Configuration:
AC_AUTHSERVER_IMAGE - Auth server Docker image
AC_WORLDSERVER_IMAGE - World server Docker image
AC_DB_IMPORT_IMAGE - Database import Docker image
EXAMPLES:
$0 # Basic validation
$0 --strict # Strict validation (check optional vars)
$0 --quiet # Only show errors
EOF
exit 0
;;
*)
echo -e "${RED}Unknown option: $1${NC}" >&2
exit 1
;;
esac
done
log_info() {
$QUIET_MODE || echo -e "${BLUE} $*${NC}"
}
log_success() {
$QUIET_MODE || echo -e "${GREEN}$*${NC}"
}
log_warning() {
echo -e "${YELLOW}⚠️ $*${NC}" >&2
}
log_error() {
echo -e "${RED}$*${NC}" >&2
}
# Load environment
load_env() {
local file="$1"
if [[ ! -f "$file" ]]; then
return 1
fi
set -a
# shellcheck disable=SC1090
source "$file" 2>/dev/null || return 1
set +a
return 0
}
# Check if variable is set and non-empty
check_var() {
local var_name="$1"
local var_value="${!var_name:-}"
if [[ -z "$var_value" ]]; then
return 1
fi
return 0
}
# Validate required variables
validate_required() {
local missing=()
local required_vars=(
# Project Configuration
"COMPOSE_PROJECT_NAME"
"NETWORK_NAME"
# Repository Configuration
"ACORE_REPO_STANDARD"
"ACORE_BRANCH_STANDARD"
"ACORE_REPO_PLAYERBOTS"
"ACORE_BRANCH_PLAYERBOTS"
# Storage Paths
"STORAGE_PATH"
"STORAGE_PATH_LOCAL"
# Database Configuration
"MYSQL_ROOT_PASSWORD"
"MYSQL_USER"
"MYSQL_PORT"
"MYSQL_HOST"
"DB_AUTH_NAME"
"DB_WORLD_NAME"
"DB_CHARACTERS_NAME"
"DB_PLAYERBOTS_NAME"
# Container Configuration
"CONTAINER_MYSQL"
"CONTAINER_USER"
)
log_info "Checking required variables..."
for var in "${required_vars[@]}"; do
if check_var "$var"; then
log_success "$var=${!var}"
else
log_error "$var is not set"
missing+=("$var")
fi
done
if [[ ${#missing[@]} -gt 0 ]]; then
log_error "Missing required variables: ${missing[*]}"
return 1
fi
log_success "All required variables are set"
return 0
}
# Validate optional variables (strict mode)
validate_optional() {
local missing=()
local optional_vars=(
# MySQL Performance Tuning
"MYSQL_INNODB_BUFFER_POOL_SIZE"
"MYSQL_INNODB_LOG_FILE_SIZE"
"MYSQL_INNODB_REDO_LOG_CAPACITY"
# Database Connection Settings
"DB_RECONNECT_SECONDS"
"DB_RECONNECT_ATTEMPTS"
# Build Configuration
"MODULES_REBUILD_SOURCE_PATH"
# Backup Configuration
"BACKUP_PATH"
"BACKUP_RETENTION_DAYS"
"BACKUP_RETENTION_HOURS"
# Image Configuration
"AC_AUTHSERVER_IMAGE"
"AC_WORLDSERVER_IMAGE"
"AC_DB_IMPORT_IMAGE"
)
log_info "Checking optional variables..."
for var in "${optional_vars[@]}"; do
if check_var "$var"; then
log_success "$var is set"
else
log_warning "$var is not set (using default)"
missing+=("$var")
fi
done
if [[ ${#missing[@]} -gt 0 ]]; then
log_warning "Optional variables not set: ${missing[*]}"
return 2
fi
log_success "All optional variables are set"
return 0
}
# Main validation
main() {
log_info "Validating environment configuration..."
echo ""
# Check if .env exists
if [[ ! -f "$ENV_FILE" ]]; then
log_error ".env file not found at $ENV_FILE"
log_info "Copy .env.template to .env and configure it:"
log_info " cp $TEMPLATE_FILE $ENV_FILE"
exit 1
fi
# Load environment
if ! load_env "$ENV_FILE"; then
log_error "Failed to load $ENV_FILE"
exit 1
fi
log_success "Loaded environment from $ENV_FILE"
echo ""
# Validate required variables
if ! validate_required; then
EXIT_CODE=1
fi
echo ""
# Validate optional variables if strict mode
if $STRICT_MODE; then
if ! validate_optional; then
[[ $EXIT_CODE -eq 0 ]] && EXIT_CODE=2
fi
echo ""
fi
# Final summary
if [[ $EXIT_CODE -eq 0 ]]; then
log_success "Environment validation passed ✨"
elif [[ $EXIT_CODE -eq 1 ]]; then
log_error "Environment validation failed (missing required variables)"
elif [[ $EXIT_CODE -eq 2 ]]; then
log_warning "Environment validation passed with warnings (missing optional variables)"
fi
exit $EXIT_CODE
}
main "$@"

View File

@@ -4,13 +4,14 @@ set -e
# Simple profile-aware deploy + health check for profiles-verify/docker-compose.yml
BLUE='\033[0;34m'; GREEN='\033[0;32m'; YELLOW='\033[1;33m'; RED='\033[0;31m'; NC='\033[0m'
info(){ echo -e "${BLUE} $*${NC}"; }
ok(){ echo -e "${GREEN}$*${NC}"; }
warn(){ echo -e "${YELLOW}⚠️ $*${NC}"; }
err(){ echo -e "${RED}$*${NC}"; }
PROJECT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
# Source common library for standardized logging
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
if ! source "$SCRIPT_DIR/lib/common.sh" 2>/dev/null; then
echo "❌ FATAL: Cannot load $SCRIPT_DIR/lib/common.sh" >&2
exit 1
fi
COMPOSE_FILE="$PROJECT_DIR/docker-compose.yml"
ENV_FILE=""
TEMPLATE_FILE="$PROJECT_DIR/.env.template"

View File

@@ -1,4 +1,4 @@
module azerothcore-realmmaster/statusdash
module acore-compose/statusdash
go 1.22

View File

@@ -1,6 +1,5 @@
#!/bin/bash
# Module-specific hook for mod-ale compatibility patches
# NOTE: These patches are primarily needed for the AzerothCore playerbots fork
set -e
# Hook environment
@@ -8,42 +7,12 @@ MODULE_KEY="${MODULE_KEY:-}"
MODULE_DIR="${MODULE_DIR:-}"
MODULE_NAME="${MODULE_NAME:-}"
# Detect if we're building with playerbots fork
IS_PLAYERBOTS_FORK=0
# Method 1: Check STACK_SOURCE_VARIANT environment variable
if [ "${STACK_SOURCE_VARIANT:-}" = "playerbots" ]; then
IS_PLAYERBOTS_FORK=1
echo " ✅ Playerbots detected via STACK_SOURCE_VARIANT"
# Method 2: Check MODULES_REBUILD_SOURCE_PATH
elif [ -n "${MODULES_REBUILD_SOURCE_PATH:-}" ] && echo "${MODULES_REBUILD_SOURCE_PATH}" | grep -q "azerothcore-playerbots"; then
IS_PLAYERBOTS_FORK=1
echo " ✅ Playerbots detected via MODULES_REBUILD_SOURCE_PATH"
else
echo " ❌ Playerbots fork not detected"
echo " 🔍 Debug: STACK_SOURCE_VARIANT='${STACK_SOURCE_VARIANT:-}'"
echo " 🔍 Debug: MODULES_REBUILD_SOURCE_PATH='${MODULES_REBUILD_SOURCE_PATH:-}'"
fi
# Feature flags (set to 0 to disable specific patches)
APPLY_MOVEPATH_PATCH="${APPLY_MOVEPATH_PATCH:-0}" # Disabled by default - appears unnecessary
# SendTrainerList patch: auto-detect based on fork, but can be overridden
if [ -z "${APPLY_SENDTRAINERLIST_PATCH:-}" ]; then
APPLY_SENDTRAINERLIST_PATCH="$IS_PLAYERBOTS_FORK" # Only needed for playerbots fork
else
APPLY_SENDTRAINERLIST_PATCH="${APPLY_SENDTRAINERLIST_PATCH}"
fi
if [ -z "$MODULE_DIR" ] || [ ! -d "$MODULE_DIR" ]; then
echo "❌ mod-ale-patches: Invalid module directory: $MODULE_DIR"
exit 2
fi
if [ "$IS_PLAYERBOTS_FORK" = "1" ]; then
echo "🔧 mod-ale-patches: Applying playerbots fork compatibility fixes to $MODULE_NAME"
else
echo "🔧 mod-ale-patches: Checking compatibility fixes for $MODULE_NAME"
fi
echo "🔧 mod-ale-patches: Applying compatibility fixes to $MODULE_NAME"
# Apply MovePath compatibility patch
apply_movepath_patch() {
@@ -68,42 +37,10 @@ apply_movepath_patch() {
fi
}
# Apply SendTrainerList compatibility patch
apply_sendtrainerlist_patch() {
local target_file="$MODULE_DIR/src/LuaEngine/methods/PlayerMethods.h"
if [ ! -f "$target_file" ]; then
echo " ⚠️ SendTrainerList patch target file missing: $target_file"
return 1
fi
# Check if the buggy code exists (without GetGUID())
if grep -q 'player->GetSession()->SendTrainerList(obj);' "$target_file"; then
# Apply the fix by adding ->GetGUID()
if sed -i 's/player->GetSession()->SendTrainerList(obj);/player->GetSession()->SendTrainerList(obj->GetGUID());/' "$target_file"; then
echo " ✅ Applied SendTrainerList compatibility fix"
return 0
else
echo " ❌ Failed to apply SendTrainerList compatibility fix"
return 2
fi
else
echo " ✅ SendTrainerList compatibility fix already present"
return 0
fi
}
# Apply all patches
patch_count=0
if [ "$APPLY_MOVEPATH_PATCH" = "1" ]; then
if apply_movepath_patch; then
patch_count=$((patch_count + 1))
fi
fi
if [ "$APPLY_SENDTRAINERLIST_PATCH" = "1" ]; then
if apply_sendtrainerlist_patch; then
patch_count=$((patch_count + 1))
fi
if apply_movepath_patch; then
patch_count=$((patch_count + 1))
fi
if [ $patch_count -eq 0 ]; then

View File

@@ -371,7 +371,12 @@ def build_state(env_path: Path, manifest_path: Path) -> ModuleCollectionState:
for unknown_key in extra_env_modules:
warnings.append(f".env defines {unknown_key} but it is missing from the manifest")
# Skip warnings for missing modules - they default to disabled (0) as intended
# Warn if manifest entry lacks .env toggle
for module in modules:
if module.key not in env_map and module.key not in os.environ:
warnings.append(
f"Manifest includes {module.key} but .env does not define it (defaulting to 0)"
)
return ModuleCollectionState(
manifest_path=manifest_path,

View File

@@ -50,9 +50,6 @@ def clean(value: str) -> str:
def cmd_keys(manifest_path: str) -> None:
manifest = load_manifest(manifest_path)
for entry in iter_modules(manifest):
# Skip blocked modules
if entry.get("status") == "blocked":
continue
print(entry["key"])
@@ -99,7 +96,7 @@ def cmd_metadata(manifest_path: str) -> None:
def cmd_sorted_keys(manifest_path: str) -> None:
manifest = load_manifest(manifest_path)
modules = [entry for entry in iter_modules(manifest) if entry.get("status") != "blocked"]
modules = list(iter_modules(manifest))
modules.sort(
key=lambda item: (
# Primary sort by order (default to 5000 if not specified)

View File

@@ -28,9 +28,8 @@ def normalize_modules(raw_modules: Iterable[str], profile: Path) -> List[str]:
if not value:
continue
modules.append(value)
# Allow empty modules list for vanilla/minimal profiles
if not modules and "vanilla" not in profile.stem.lower() and "minimal" not in profile.stem.lower():
raise ValueError(f"Profile {profile.name}: modules list cannot be empty (except for vanilla/minimal profiles)")
if not modules:
raise ValueError(f"Profile {profile.name}: modules list cannot be empty")
return modules
@@ -80,7 +79,7 @@ def cmd_list(directory: Path) -> int:
profiles.sort(key=lambda item: item[4])
for name, modules, label, description, order in profiles:
modules_csv = ",".join(modules) if modules else "-"
modules_csv = ",".join(modules)
print("\t".join([name, modules_csv, label, description, str(order)]))
return 0

View File

@@ -18,7 +18,6 @@ import re
import sys
import time
from dataclasses import dataclass
from pathlib import Path
from typing import Dict, Iterable, List, Optional, Sequence
from urllib import error, parse, request
@@ -46,7 +45,7 @@ CATEGORY_BY_TYPE = {
"data": "data",
"cpp": "uncategorized",
}
USER_AGENT = "azerothcore-realmmaster-module-manifest"
USER_AGENT = "acore-compose-module-manifest"
def parse_args(argv: Sequence[str]) -> argparse.Namespace:
@@ -88,16 +87,6 @@ def parse_args(argv: Sequence[str]) -> argparse.Namespace:
action="store_true",
help="Print verbose progress information",
)
parser.add_argument(
"--update-template",
default=".env.template",
help="Update .env.template with missing module variables (default: %(default)s)",
)
parser.add_argument(
"--skip-template",
action="store_true",
help="Skip updating .env.template",
)
return parser.parse_args(argv)
@@ -284,117 +273,6 @@ def collect_repositories(
return list(seen.values())
def update_env_template(manifest_path: str, template_path: str) -> bool:
"""Update .env.template with module variables for active modules only.
Args:
manifest_path: Path to the module manifest JSON file
template_path: Path to .env.template file
Returns:
True if template was updated, False if no changes needed
"""
# Load manifest to get all module keys
manifest = load_manifest(manifest_path)
modules = manifest.get("modules", [])
if not modules:
return False
# Extract only active module keys
active_module_keys = set()
disabled_module_keys = set()
for module in modules:
key = module.get("key")
status = module.get("status", "active")
if key:
if status == "active":
active_module_keys.add(key)
else:
disabled_module_keys.add(key)
if not active_module_keys and not disabled_module_keys:
return False
# Check if template file exists
template_file = Path(template_path)
if not template_file.exists():
print(f"Warning: .env.template not found at {template_path}")
return False
# Read current template content
try:
current_content = template_file.read_text(encoding="utf-8")
current_lines = current_content.splitlines()
except Exception as exc:
print(f"Error reading .env.template: {exc}")
return False
# Find which module variables are currently in the template
existing_vars = set()
current_module_lines = []
non_module_lines = []
for line in current_lines:
stripped = line.strip()
if "=" in stripped and not stripped.startswith("#"):
var_name = stripped.split("=", 1)[0].strip()
if var_name.startswith("MODULE_"):
existing_vars.add(var_name)
current_module_lines.append((var_name, line))
else:
non_module_lines.append(line)
else:
non_module_lines.append(line)
# Determine what needs to change
missing_vars = active_module_keys - existing_vars
vars_to_remove = disabled_module_keys & existing_vars
vars_to_keep = active_module_keys & existing_vars
changes_made = False
# Report what will be done
if missing_vars:
print(f"📝 Adding {len(missing_vars)} active module variable(s) to .env.template:")
for var in sorted(missing_vars):
print(f" + {var}=0")
changes_made = True
if vars_to_remove:
print(f"🗑️ Removing {len(vars_to_remove)} disabled module variable(s) from .env.template:")
for var in sorted(vars_to_remove):
print(f" - {var}")
changes_made = True
if not changes_made:
print("✅ .env.template is up to date with active modules")
return False
# Build new content: non-module lines + active module lines
new_lines = non_module_lines[:]
# Add existing active module variables (preserve their current values)
for var_name, original_line in current_module_lines:
if var_name in vars_to_keep:
new_lines.append(original_line)
# Add new active module variables
for var in sorted(missing_vars):
new_lines.append(f"{var}=0")
# Write updated content
try:
new_content = "\n".join(new_lines) + "\n"
template_file.write_text(new_content, encoding="utf-8")
print("✅ .env.template updated successfully")
print(f" Active modules: {len(active_module_keys)}")
print(f" Disabled modules removed: {len(vars_to_remove)}")
return True
except Exception as exc:
print(f"Error writing .env.template: {exc}")
return False
def main(argv: Sequence[str]) -> int:
args = parse_args(argv)
topics = args.topics or DEFAULT_TOPICS
@@ -413,13 +291,6 @@ def main(argv: Sequence[str]) -> int:
handle.write("\n")
print(f"Updated manifest {args.manifest}: added {added}, refreshed {updated}")
# Update .env.template if requested (always run to clean up disabled modules)
if not args.skip_template:
template_updated = update_env_template(args.manifest, args.update_template)
if template_updated:
print(f"Updated {args.update_template} with active modules only")
return 0

273
setup.sh
View File

@@ -3,9 +3,9 @@ set -e
clear
# ==============================================
# AzerothCore-RealmMaster - Interactive .env generator
# azerothcore-rm - Interactive .env generator
# ==============================================
# Mirrors options from scripts/setup-server.sh but targets .env
# Mirrors options from scripts/setup-server.sh but targets azerothcore-rm/.env
# Get script directory for template reading
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
@@ -16,12 +16,6 @@ TEMPLATE_FILE="$SCRIPT_DIR/.env.template"
source "$SCRIPT_DIR/scripts/bash/project_name.sh"
DEFAULT_PROJECT_NAME="$(project_name::resolve "$ENV_FILE" "$TEMPLATE_FILE")"
# ==============================================
# Feature Flags
# ==============================================
# Set to 0 to disable server configuration preset selection
ENABLE_CONFIG_PRESETS="${ENABLE_CONFIG_PRESETS:-0}"
# ==============================================
# Constants (auto-loaded from .env.template)
# ==============================================
@@ -337,57 +331,25 @@ show_wow_header() {
echo -e "${RED}"
cat <<'EOF'
##
### :*
##### .**#
###### ***##
****###* *****##.
******##- ******###.
.*********###= ********###
************##### #****###:+* ********####
***********+****##########**********##**# ********#####
********=+***********######**********######*#**+*******###+
-+*****=**************#######*******####**#####**##*****####-
++**++****************#########**####***####***#####****####:
:++*******************#*******####*****#****######***##*****#######
*= -++++++******************************###**********###******######
.+***. :++++++++***************************#+*#*-*******************#**+
++*****= =+++=+++***************************+**###**************++*#####*
-++*****+++- -=++++++++*********+++++**###**+++=+*###**+*********##+++*+++##
+++*********+++=-=+++++++++****+++***+++++*####***+++**=**#*==***#####*++***+*+
+++++***********++=-=++++++++*++****=++*++*#######**+=-=+****+*#########***==+*#*
=+++++++*****++++===-++++++++=+++++=++*+=-+#**#**=####****#**+-+**************##*
++++++++++++++======++++++++=====+++++=-+++*+##########*****==*######*****####
+++++++=++++++====++++++++++========---++++*****#######**==***#*******####*
++===++++++++=====+++++++=+++:::--:::.++++++*****####**+=**************#
=+++++=: =+=====-+++++++++++++++++++++==+++--==----:-++++++****####****+=+*+*******:
++++++++++++++++==+++++++++++++++++++++=+=-===-----:+++++++++**+++****####***+++
=++++++++++++++++++++++++++++++++++++=++++======----==+++++++=+************:
:++++++++++++++=+++++++++++++++++++======-------:-====+****************.
=----=+++-==++++++*******++++++++++++++===============****************=
-=---==-=====--+++++++++++++++++++++++++++===+++++++********++#***#++******
+++++========+=====----++++++++++++++++===+++++===--=**********+=++*++********
+++==========-=============-----:-=++=====+++++++++++++++=-=***********+*********
==----=+===+=================+++++++++++++++++++++++++=-********************
.======++++++===============---:::::==++++++++++++++++++++++=**********++*******:
+++==--::-=+++++++++++++========+===--=+- :::=-=++++++++++++++++++++++ +*****++**+***
.-----::::-=++++++++++++++++++==::-----++. :=+++++++++++++++++++*..-+*********=
:=+++++++++++++++++==.:--===-+++++++++++**++++++:::-********
++++++++++++++++++=+++++++++++++**+++++*****==******
.++++++++++++=-:.-+++++++++***++++************+
+++=========:.=+=-::++*****+*************
-++++++++==+: ..::=-. ..::::=********
.+========+==+++==========---::-+*-
++++++++++++=======-======
++++++++++++++======++
-=======++++++:
...
:::. :::::::::.,:::::: :::::::.. ... :::::::::::: :: .: .,-::::: ... :::::::.. .,::::::
;;`;; '`````;;;;;;;'''' ;;;;``;;;; .;;;;;;;.;;;;;;;;'''',;; ;;, ,;;;'````' .;;;;;;;. ;;;;``;;;; ;;;;''''
,[[ '[[, .n[[' [[cccc [[[,/[[[' ,[[ \[[, [[ ,[[[,,,[[[ [[[ ,[[ \[[,[[[,/[[[' [[cccc
c$$$cc$$$c ,$$P" $$"""" $$$$$$c $$$, $$$ $$ "$$$"""$$$ $$$ $$$, $$$$$$$$$c $$""""
888 888,,888bo,_ 888oo,__ 888b "88bo,"888,_ _,88P 88, 888 "88o`88bo,__,o,"888,_ _,88P888b "88bo,888oo,__
YMM ""` `""*UMM """"YUMMMMMMM "W" "YMMMMMP" MMM MMM YMM "YUMMMMMP" "YMMMMMP" MMMM "W" """"\MMM
___ ___ ___ ___ ___ ___ ___
.'`~ ``. .'`~ ``. .'`~ ``. .'`~ ``. .'`~ ``. .'`~ ``. .'`~ ``.
)`_ ._ ( )`_ ._ ( )`_ ._ ( )`_ ._ ( )`_ ._ ( )`_ ._ ( )`_ ._ (
|(_/^\_)| |(_/^\_)| |(_/^\_)| |(_/^\_)| |(_/^\_)| |(_/^\_)| |(_/^\_)|
`-.`''.-' `-.`''.-' `-.`''.-' `-.`''.-' `-.`''.-' `-.`''.-' `-.`''.-'
""" """ """ """ """ """ """
.')'=.'_`.='(`. .')'=.'_`.='(`. .')'=.'_`.='(`. .')'=.'_`.='(`. .')'=.'_`.='(`. .')'=.'_`.='(`. .')'=.'_`.='(`.
:| -.._H_,.- |: :| -.._H_,.- |: :| -.._H_,.- |: :| -.._H_,.- |: :| -.._H_,.- |: :| -.._H_,.- |: :| -.._H_,.- |:
|: -.__H__.- :| |: -.__H__.- :| |: -.__H__.- :| |: -.__H__.- :| |: -.__H__.- :| |: -.__H__.- :| |: -.__H__.- :|
<' `--V--' `> <' `--V--' `> <' `--V--' `> <' `--V--' `> <' `--V--' `> <' `--V--' `> <' `--V--' `>
art: littlebitspace@https://littlebitspace.com/
EOF
echo -e "${NC}"
}
@@ -629,7 +591,7 @@ main(){
Usage: ./setup.sh [options]
Description:
Interactive wizard that generates .env for the
Interactive wizard that generates azerothcore-rm/.env for the
profiles-based compose. Prompts for deployment type, ports, storage,
MySQL credentials, backup retention, and module presets or manual
toggles.
@@ -847,7 +809,7 @@ EOF
fi
show_wow_header
say INFO "This will create .env for compose profiles."
say INFO "This will create azerothcore-rm/.env for compose profiles."
# Deployment type
say HEADER "DEPLOYMENT TYPE"
@@ -1001,65 +963,58 @@ fi
BACKUP_DAILY_TIME=$(ask "Daily backup hour (00-23, UTC)" "${CLI_BACKUP_TIME:-$DEFAULT_BACKUP_TIME}" validate_number)
# Server configuration
say HEADER "SERVER CONFIGURATION PRESET"
local SERVER_CONFIG_PRESET
if [ "$ENABLE_CONFIG_PRESETS" = "1" ]; then
say HEADER "SERVER CONFIGURATION PRESET"
if [ -n "$CLI_CONFIG_PRESET" ]; then
SERVER_CONFIG_PRESET="$CLI_CONFIG_PRESET"
say INFO "Using preset from command line: $SERVER_CONFIG_PRESET"
else
declare -A CONFIG_PRESET_NAMES=()
declare -A CONFIG_PRESET_DESCRIPTIONS=()
declare -A CONFIG_MENU_INDEX=()
local config_dir="$SCRIPT_DIR/config/presets"
local menu_index=1
echo "Choose a server configuration preset:"
if [ -x "$SCRIPT_DIR/scripts/python/parse-config-presets.py" ] && [ -d "$config_dir" ]; then
while IFS=$'\t' read -r preset_key preset_name preset_desc; do
[ -n "$preset_key" ] || continue
CONFIG_PRESET_NAMES["$preset_key"]="$preset_name"
CONFIG_PRESET_DESCRIPTIONS["$preset_key"]="$preset_desc"
CONFIG_MENU_INDEX[$menu_index]="$preset_key"
echo "$menu_index) $preset_name"
echo " $preset_desc"
menu_index=$((menu_index + 1))
done < <(python3 "$SCRIPT_DIR/scripts/python/parse-config-presets.py" list --presets-dir "$config_dir")
else
# Fallback if parser script not available
CONFIG_MENU_INDEX[1]="none"
CONFIG_PRESET_NAMES["none"]="Default (No Preset)"
CONFIG_PRESET_DESCRIPTIONS["none"]="Use default AzerothCore settings"
echo "1) Default (No Preset)"
echo " Use default AzerothCore settings without any modifications"
fi
local max_config_option=$((menu_index - 1))
if [ "$NON_INTERACTIVE" = "1" ]; then
SERVER_CONFIG_PRESET="none"
say INFO "Non-interactive mode: Using default configuration preset"
else
while true; do
read -p "$(echo -e "${YELLOW}🎯 Select server configuration [1-$max_config_option]: ${NC}")" choice
if [[ "$choice" =~ ^[0-9]+$ ]] && [ "$choice" -ge 1 ] && [ "$choice" -le "$max_config_option" ]; then
SERVER_CONFIG_PRESET="${CONFIG_MENU_INDEX[$choice]}"
local chosen_name="${CONFIG_PRESET_NAMES[$SERVER_CONFIG_PRESET]}"
say INFO "Selected: $chosen_name"
break
else
say ERROR "Please select a number between 1 and $max_config_option"
fi
done
fi
fi
if [ -n "$CLI_CONFIG_PRESET" ]; then
SERVER_CONFIG_PRESET="$CLI_CONFIG_PRESET"
say INFO "Using preset from command line: $SERVER_CONFIG_PRESET"
else
# Config presets disabled - use default
SERVER_CONFIG_PRESET="none"
say INFO "Server configuration presets disabled - using default settings"
declare -A CONFIG_PRESET_NAMES=()
declare -A CONFIG_PRESET_DESCRIPTIONS=()
declare -A CONFIG_MENU_INDEX=()
local config_dir="$SCRIPT_DIR/config/presets"
local menu_index=1
echo "Choose a server configuration preset:"
if [ -x "$SCRIPT_DIR/scripts/python/parse-config-presets.py" ] && [ -d "$config_dir" ]; then
while IFS=$'\t' read -r preset_key preset_name preset_desc; do
[ -n "$preset_key" ] || continue
CONFIG_PRESET_NAMES["$preset_key"]="$preset_name"
CONFIG_PRESET_DESCRIPTIONS["$preset_key"]="$preset_desc"
CONFIG_MENU_INDEX[$menu_index]="$preset_key"
echo "$menu_index) $preset_name"
echo " $preset_desc"
menu_index=$((menu_index + 1))
done < <(python3 "$SCRIPT_DIR/scripts/python/parse-config-presets.py" list --presets-dir "$config_dir")
else
# Fallback if parser script not available
CONFIG_MENU_INDEX[1]="none"
CONFIG_PRESET_NAMES["none"]="Default (No Preset)"
CONFIG_PRESET_DESCRIPTIONS["none"]="Use default AzerothCore settings"
echo "1) Default (No Preset)"
echo " Use default AzerothCore settings without any modifications"
fi
local max_config_option=$((menu_index - 1))
if [ "$NON_INTERACTIVE" = "1" ]; then
SERVER_CONFIG_PRESET="none"
say INFO "Non-interactive mode: Using default configuration preset"
else
while true; do
read -p "$(echo -e "${YELLOW}🎯 Select server configuration [1-$max_config_option]: ${NC}")" choice
if [[ "$choice" =~ ^[0-9]+$ ]] && [ "$choice" -ge 1 ] && [ "$choice" -le "$max_config_option" ]; then
SERVER_CONFIG_PRESET="${CONFIG_MENU_INDEX[$choice]}"
local chosen_name="${CONFIG_PRESET_NAMES[$SERVER_CONFIG_PRESET]}"
say INFO "Selected: $chosen_name"
break
else
say ERROR "Please select a number between 1 and $max_config_option"
fi
done
fi
fi
local MODE_SELECTION=""
@@ -1139,29 +1094,12 @@ fi
MODE_PRESET_NAME="$CLI_MODULE_PRESET"
fi
# Function to determine source branch for a preset
get_preset_source_branch() {
local preset_name="$1"
local preset_modules="${MODULE_PRESET_CONFIGS[$preset_name]:-}"
# Check if playerbots module is in the preset
if [[ "$preset_modules" == *"MODULE_PLAYERBOTS"* ]]; then
echo "azerothcore-playerbots"
else
echo "azerothcore-wotlk"
fi
}
# Module config
say HEADER "MODULE PRESET"
printf " %s) %s\n" "1" "⭐ Suggested Modules"
printf " %s (%s)\n" "Baseline solo-friendly quality of life mix" "azerothcore-wotlk"
printf " %s) %s\n" "2" "🤖 Playerbots + Suggested modules"
printf " %s (%s)\n" "Suggested stack plus playerbots enabled" "azerothcore-playerbots"
printf " %s) %s\n" "3" "⚙️ Manual selection"
printf " %s (%s)\n" "Choose individual modules manually" "(depends on modules)"
printf " %s) %s\n" "4" "🚫 No modules"
printf " %s (%s)\n" "Pure AzerothCore with no modules" "azerothcore-wotlk"
echo "1) ${MODULE_PRESET_LABELS[$DEFAULT_PRESET_SUGGESTED]:-⭐ Suggested Modules}"
echo "2) ${MODULE_PRESET_LABELS[$DEFAULT_PRESET_PLAYERBOTS]:-🤖 Playerbots + Suggested modules}"
echo "3) ⚙️ Manual selection"
echo "4) 🚫 No modules"
local menu_index=5
declare -A MENU_PRESET_INDEX=()
@@ -1180,16 +1118,13 @@ fi
for entry in "${ORDERED_PRESETS[@]}"; do
local preset_name="${entry#*::}"
[ -n "${MODULE_PRESET_CONFIGS[$preset_name]:-}" ] || continue
local pretty_name preset_desc
local pretty_name
if [ -n "${MODULE_PRESET_LABELS[$preset_name]:-}" ]; then
pretty_name="${MODULE_PRESET_LABELS[$preset_name]}"
else
pretty_name=$(echo "$preset_name" | tr '_-' ' ' | awk '{for(i=1;i<=NF;i++){$i=toupper(substr($i,1,1)) substr($i,2)}}1')
fi
preset_desc="${MODULE_PRESET_DESCRIPTIONS[$preset_name]:-No description available}"
local source_branch=$(get_preset_source_branch "$preset_name")
printf " %s) %s\n" "$menu_index" "$pretty_name"
printf " %s (%s)\n" "$preset_desc" "$source_branch"
echo "${menu_index}) ${pretty_name} (config/module-profiles/${preset_name}.json)"
MENU_PRESET_INDEX[$menu_index]="$preset_name"
menu_index=$((menu_index + 1))
done
@@ -1487,16 +1422,11 @@ fi
MODULES_CPP_LIST="$(IFS=','; printf '%s' "${enabled_cpp_module_keys[*]}")"
fi
# Determine source variant based ONLY on playerbots module
local STACK_IMAGE_MODE="standard"
local STACK_SOURCE_VARIANT="core"
if [ "$MODULE_PLAYERBOTS" = "1" ] || [ "$PLAYERBOT_ENABLED" = "1" ]; then
STACK_SOURCE_VARIANT="playerbots"
fi
# Determine image mode based on source variant and build requirements
local STACK_IMAGE_MODE="standard"
if [ "$STACK_SOURCE_VARIANT" = "playerbots" ]; then
STACK_IMAGE_MODE="playerbots"
STACK_SOURCE_VARIANT="playerbots"
elif [ "$NEEDS_CXX_REBUILD" = "1" ]; then
STACK_IMAGE_MODE="modules"
fi
@@ -1592,7 +1522,7 @@ fi
fi
local default_source_rel="${LOCAL_STORAGE_ROOT}/source/azerothcore"
if [ "$STACK_SOURCE_VARIANT" = "playerbots" ]; then
if [ "$NEEDS_CXX_REBUILD" = "1" ] || [ "$MODULE_PLAYERBOTS" = "1" ]; then
default_source_rel="${LOCAL_STORAGE_ROOT}/source/azerothcore-playerbots"
fi
@@ -1674,7 +1604,7 @@ fi
{
cat <<EOF
# Generated by setup.sh
# Generated by azerothcore-rm/setup.sh
# Compose overrides (set to 1 to include matching file under compose-overrides/)
# mysql-expose.yml -> exposes MySQL externally via COMPOSE_OVERRIDE_MYSQL_EXPOSE_ENABLED
@@ -1686,15 +1616,6 @@ COMPOSE_PROJECT_NAME=$DEFAULT_COMPOSE_PROJECT_NAME
STORAGE_PATH=$STORAGE_PATH
STORAGE_PATH_LOCAL=$LOCAL_STORAGE_ROOT
STORAGE_CONFIG_PATH=$(get_template_value "STORAGE_CONFIG_PATH")
STORAGE_LOGS_PATH=$(get_template_value "STORAGE_LOGS_PATH")
STORAGE_MODULES_PATH=$(get_template_value "STORAGE_MODULES_PATH")
STORAGE_LUA_SCRIPTS_PATH=$(get_template_value "STORAGE_LUA_SCRIPTS_PATH")
STORAGE_MODULES_META_PATH=$(get_template_value "STORAGE_MODULES_META_PATH")
STORAGE_MODULE_SQL_PATH=$(get_template_value "STORAGE_MODULE_SQL_PATH")
STORAGE_INSTALL_MARKERS_PATH=$(get_template_value "STORAGE_INSTALL_MARKERS_PATH")
STORAGE_CLIENT_DATA_PATH=$(get_template_value "STORAGE_CLIENT_DATA_PATH")
STORAGE_LOCAL_SOURCE_PATH=$(get_template_value "STORAGE_LOCAL_SOURCE_PATH")
BACKUP_PATH=$BACKUP_PATH
TZ=$DEFAULT_TZ
@@ -1763,31 +1684,10 @@ CONTAINER_USER=$CONTAINER_USER
CONTAINER_MYSQL=$DEFAULT_CONTAINER_MYSQL
CONTAINER_DB_IMPORT=$DEFAULT_CONTAINER_DB_IMPORT
CONTAINER_DB_INIT=$DEFAULT_CONTAINER_DB_INIT
CONTAINER_DB_GUARD=$(get_template_value "CONTAINER_DB_GUARD")
CONTAINER_BACKUP=$DEFAULT_CONTAINER_BACKUP
CONTAINER_MODULES=$DEFAULT_CONTAINER_MODULES
CONTAINER_POST_INSTALL=$DEFAULT_CONTAINER_POST_INSTALL
# Database Guard Defaults
DB_GUARD_RECHECK_SECONDS=$(get_template_value "DB_GUARD_RECHECK_SECONDS")
DB_GUARD_RETRY_SECONDS=$(get_template_value "DB_GUARD_RETRY_SECONDS")
DB_GUARD_WAIT_ATTEMPTS=$(get_template_value "DB_GUARD_WAIT_ATTEMPTS")
DB_GUARD_HEALTH_MAX_AGE=$(get_template_value "DB_GUARD_HEALTH_MAX_AGE")
DB_GUARD_HEALTHCHECK_INTERVAL=$(get_template_value "DB_GUARD_HEALTHCHECK_INTERVAL")
DB_GUARD_HEALTHCHECK_TIMEOUT=$(get_template_value "DB_GUARD_HEALTHCHECK_TIMEOUT")
DB_GUARD_HEALTHCHECK_RETRIES=$(get_template_value "DB_GUARD_HEALTHCHECK_RETRIES")
DB_GUARD_VERIFY_INTERVAL_SECONDS=$(get_template_value "DB_GUARD_VERIFY_INTERVAL_SECONDS")
# Module SQL staging
STAGE_PATH_MODULE_SQL=$(get_template_value "STAGE_PATH_MODULE_SQL")
# Modules rebuild source path
MODULES_REBUILD_SOURCE_PATH=$MODULES_REBUILD_SOURCE_PATH
# SQL Source Overlay
SOURCE_DIR=$(get_template_value "SOURCE_DIR")
AC_SQL_SOURCE_PATH=$(get_template_value "AC_SQL_SOURCE_PATH")
# Ports
AUTH_EXTERNAL_PORT=$AUTH_EXTERNAL_PORT
AUTH_PORT=$DEFAULT_AUTH_INTERNAL_PORT
@@ -1804,8 +1704,6 @@ REALM_PORT=$REALM_PORT
BACKUP_RETENTION_DAYS=$BACKUP_RETENTION_DAYS
BACKUP_RETENTION_HOURS=$BACKUP_RETENTION_HOURS
BACKUP_DAILY_TIME=$BACKUP_DAILY_TIME
BACKUP_INTERVAL_MINUTES=$(get_template_value "BACKUP_INTERVAL_MINUTES")
BACKUP_EXTRA_DATABASES=$(get_template_value "BACKUP_EXTRA_DATABASES")
BACKUP_HEALTHCHECK_MAX_MINUTES=$BACKUP_HEALTHCHECK_MAX_MINUTES
BACKUP_HEALTHCHECK_GRACE_SECONDS=$BACKUP_HEALTHCHECK_GRACE_SECONDS
@@ -1813,13 +1711,10 @@ EOF
echo
echo "# Modules"
for module_key in "${MODULE_KEYS[@]}"; do
local module_value="${!module_key:-0}"
# Only write enabled modules (value=1) to .env
if [ "$module_value" = "1" ]; then
printf "%s=%s\n" "$module_key" "$module_value"
fi
printf "%s=%s\n" "$module_key" "${!module_key:-0}"
done
cat <<EOF
MODULES_REBUILD_SOURCE_PATH=$MODULES_REBUILD_SOURCE_PATH
# Client data
CLIENT_DATA_VERSION=${CLIENT_DATA_VERSION:-$DEFAULT_CLIENT_DATA_VERSION}

View File

@@ -7,11 +7,11 @@ set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$ROOT_DIR"
BLUE='\033[0;34m'; GREEN='\033[0;32m'; YELLOW='\033[1;33m'; RED='\033[0;31m'; NC='\033[0m'
info(){ printf '%b\n' "${BLUE} $*${NC}"; }
ok(){ printf '%b\n' "${GREEN}$*${NC}"; }
warn(){ printf '%b\n' "${YELLOW}⚠️ $*${NC}"; }
err(){ printf '%b\n' "${RED}$*${NC}"; }
# Source common library for standardized logging
if ! source "$ROOT_DIR/scripts/bash/lib/common.sh" 2>/dev/null; then
echo "❌ FATAL: Cannot load $ROOT_DIR/scripts/bash/lib/common.sh" >&2
exit 1
fi
FORCE_DIRTY=0
DEPLOY_ARGS=()