31 Commits

Author SHA1 Message Date
uprightbass360
dcb837430a fix: resolve YAML syntax errors in create-release workflow
Replaced heredocs with echo statements to avoid YAML parsing issues.
The YAML parser was interpreting markdown headings and other content
within heredocs as YAML syntax, causing validation errors.

Using grouped echo statements ({ echo ...; } > file) works correctly
with GitHub Actions YAML parser while maintaining variable expansion.

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-01-10 17:19:32 -05:00
uprightbass360
61137b5510 adds workflow build for RealmMaster profile 2026-01-10 17:09:22 -05:00
uprightbass360
ce76769f79 Add graceful MySQL tmpfs sync on shutdown 2026-01-08 02:39:08 -05:00
uprightbass360
fe410a6d4d fix: mod-ale compiler patch 2026-01-05 19:25:34 -05:00
Deckard
b6b37f37c9 Update recommendations for Debian 12 compatibility
Clarified compatibility with Debian 12 regarding permissions.
2026-01-04 16:45:20 -05:00
uprightbass360
9aea5bc41e chore: sync module manifest 2026-01-03 23:39:07 -05:00
uprightbass360
63a088cc0f fix: renamed module profile 2026-01-03 13:19:00 -05:00
uprightbass360
861e924aae chore: update eluna naming 2026-01-03 04:07:45 -05:00
uprightbass360
4e8af3f7a7 cleanup ignore 2026-01-03 02:27:10 -05:00
uprightbass360
dc32e715ca chore: removes TS module and updates names 2026-01-03 02:21:35 -05:00
uprightbass360
93d3df7436 chore: docs update 2026-01-03 02:19:01 -05:00
uprightbass360
179c486f73 mod-ale finnagling 2026-01-02 18:33:10 -05:00
uprightbass360
a497f2844c cleanup: better variable handling 2026-01-02 16:18:25 -05:00
uprightbass360
b046f7f8ba fix: workaround for ale+playerbots 2026-01-02 02:53:47 -05:00
uprightbass360
f5b3b07bcb update module profiles 2026-01-01 15:50:04 -05:00
uprightbass360
07110902a6 cleanup: remove stale changelog and notes 2025-12-27 18:48:40 -05:00
uprightbass360
a67bfcd87b sets module rebuild path automatically 2025-12-27 18:41:33 -05:00
uprightbass360
b0444019ae cleans up variable expansion 2025-12-27 18:36:50 -05:00
Deckard
29d299e402 Change realmmaster status from 'active' to 'blocked' 2025-12-27 18:14:26 -05:00
Deckard
10c45716cf Remove MODULE_AZEROTHCORE_REALMMASTER from template 2025-12-27 18:14:26 -05:00
uprightbass360
3a8f076894 chore: sync module manifest 2025-12-27 18:14:26 -05:00
uprightbass360
3ec83b7714 adds fallback for workflow 2025-12-27 18:07:40 -05:00
uprightbass360
b7d55976cd updates setup language 2025-12-27 17:05:10 -05:00
uprightbass360
63b0a4ba5d adds thanks to readme 2025-12-27 17:00:05 -05:00
uprightbass360
9b9d99904a cleans up env generation and dropps disabled flags 2025-12-27 16:46:27 -05:00
uprightbass360
690ee4317c updates modules and module setup 2025-12-27 16:46:27 -05:00
uprightbass360
b8245e7b3f chore: updates modules and module updater 2025-12-27 15:30:59 -05:00
uprightbass360
6ed10dead7 add helps 2025-12-12 18:56:42 -05:00
uprightbass360
9f3038a516 flips qr generation to params 2025-12-12 18:49:17 -05:00
uprightbass360
ea3c2e750c adds pdump and 2fa generation 2025-12-12 18:33:53 -05:00
uprightbass360
63b2ca8151 backup fixes 2025-12-03 22:13:22 -05:00
51 changed files with 5296 additions and 593 deletions

.env.prebuilt (new file, 321 lines)

@@ -0,0 +1,321 @@
# ================================================================================
# AzerothCore RealmMaster - Pre-Built Images Configuration
# ================================================================================
# Use this minimal configuration file to deploy pre-built RealmMaster images
# from Docker Hub. No local building required!
#
# Quick Start:
# 1. Copy this file: cp .env.prebuilt .env
# 2. Set your DOCKERHUB_USERNAME below
# 3. Run: ./deploy.sh
#
# The pre-built images include 32 modules from the RealmMaster profile:
# - Playerbots, Transmog, Solo LFG, Eluna, NPC Buffer, and 27 more
# - See config/module-profiles/RealmMaster.json for full list
# ================================================================================
# =====================
# REQUIRED: Docker Hub Configuration
# =====================
# Set this to your Docker Hub username where the images are published
DOCKERHUB_USERNAME=your-dockerhub-username
# =====================
# Project Configuration
# =====================
COMPOSE_PROJECT_NAME=azerothcore-realmmaster
# =====================
# Module Profile Selection
# =====================
# Choose which module profile build to use:
# - realmmaster: 32 modules (playerbots, transmog, solo-lfg, eluna, etc.) - ✅ Available now (Recommended)
#
# Additional profiles (available soon - will be built on demand):
# - suggested-modules: Alternative suggested module set
# - all-modules: All supported modules
# - playerbots-only: Just playerbots
#
# Note: Only 'realmmaster' images are currently published to Docker Hub.
# Other profiles will be available when built via GitHub Actions workflow.
MODULE_PROFILE=realmmaster
# =====================
# Pre-Built Images from Docker Hub
# =====================
# These images are built nightly with different module profiles
# Using profile-specific tags ensures you get the exact module set you want
AC_AUTHSERVER_IMAGE_MODULES=${DOCKERHUB_USERNAME}/${COMPOSE_PROJECT_NAME}:authserver-${MODULE_PROFILE}-latest
AC_WORLDSERVER_IMAGE_MODULES=${DOCKERHUB_USERNAME}/${COMPOSE_PROJECT_NAME}:worldserver-${MODULE_PROFILE}-latest
# Alternative: Use date-tagged images for version pinning
# AC_AUTHSERVER_IMAGE_MODULES=${DOCKERHUB_USERNAME}/${COMPOSE_PROJECT_NAME}:authserver-${MODULE_PROFILE}-20260109
# AC_WORLDSERVER_IMAGE_MODULES=${DOCKERHUB_USERNAME}/${COMPOSE_PROJECT_NAME}:worldserver-${MODULE_PROFILE}-20260109
# Alternative: Use generic latest tags (default: realmmaster profile)
# AC_AUTHSERVER_IMAGE_MODULES=${DOCKERHUB_USERNAME}/${COMPOSE_PROJECT_NAME}:authserver-latest
# AC_WORLDSERVER_IMAGE_MODULES=${DOCKERHUB_USERNAME}/${COMPOSE_PROJECT_NAME}:worldserver-latest
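# Example (illustrative): with DOCKERHUB_USERNAME=jane-doe and the defaults above
# (COMPOSE_PROJECT_NAME=azerothcore-realmmaster, MODULE_PROFILE=realmmaster), the
# modules image references expand to:
#   jane-doe/azerothcore-realmmaster:authserver-realmmaster-latest
#   jane-doe/azerothcore-realmmaster:worldserver-realmmaster-latest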
# Standard images (fallback if modules images not available)
AC_AUTHSERVER_IMAGE=acore/ac-wotlk-authserver:master
AC_WORLDSERVER_IMAGE=acore/ac-wotlk-worldserver:master
# Playerbots images (referenced by docker-compose, even if using modules profile)
AC_AUTHSERVER_IMAGE_PLAYERBOTS=${COMPOSE_PROJECT_NAME}:authserver-playerbots
AC_WORLDSERVER_IMAGE_PLAYERBOTS=${COMPOSE_PROJECT_NAME}:worldserver-playerbots
AC_CLIENT_DATA_IMAGE_PLAYERBOTS=${COMPOSE_PROJECT_NAME}:client-data-playerbots
# Database and client data images
AC_DB_IMPORT_IMAGE=acore/ac-wotlk-db-import:master
AC_CLIENT_DATA_IMAGE=acore/ac-wotlk-client-data:master
# Helper images
ALPINE_IMAGE=alpine:latest
MYSQL_IMAGE=mysql:8.0
# =====================
# Storage Paths
# =====================
STORAGE_PATH=./storage
STORAGE_PATH_LOCAL=./local-storage
STORAGE_CONFIG_PATH=${STORAGE_PATH}/config
STORAGE_LOGS_PATH=${STORAGE_PATH}/logs
STORAGE_MODULES_PATH=${STORAGE_PATH}/modules
STORAGE_LUA_SCRIPTS_PATH=${STORAGE_PATH}/lua_scripts
STORAGE_MODULE_SQL_PATH=${STORAGE_PATH}/module-sql-updates
STORAGE_INSTALL_MARKERS_PATH=${STORAGE_PATH}/install-markers
STORAGE_CLIENT_DATA_PATH=${STORAGE_PATH}/client-data
BACKUP_PATH=${STORAGE_PATH}/backups
# =====================
# Timezone
# =====================
HOST_ZONEINFO_PATH=/usr/share/zoneinfo
TZ=UTC
# =====================
# Networking
# =====================
NETWORK_NAME=azerothcore
NETWORK_SUBNET=172.20.0.0/16
NETWORK_GATEWAY=172.20.0.1
# =====================
# Server Address & Realm
# =====================
# Change this to your server's public IP or domain name
SERVER_ADDRESS=127.0.0.1
REALM_PORT=8215
# =====================
# Ports
# =====================
# Authentication server
AUTH_EXTERNAL_PORT=3784
AUTH_PORT=3724
# World server
WORLD_EXTERNAL_PORT=8215
WORLD_PORT=8085
# SOAP/Remote access
SOAP_EXTERNAL_PORT=7778
SOAP_PORT=7878
# MySQL database (for external access)
MYSQL_EXTERNAL_PORT=64306
# phpMyAdmin web interface
PMA_EXTERNAL_PORT=8081
# Keira3 editor interface
KEIRA3_EXTERNAL_PORT=4201
# =====================
# MySQL Database Configuration
# =====================
MYSQL_IMAGE=mysql:8.0
CONTAINER_MYSQL=ac-mysql
MYSQL_HOST=ac-mysql
MYSQL_PORT=3306
# Security: Change these passwords!
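# For example, a random replacement can be generated with: openssl rand -base64 24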
MYSQL_ROOT_PASSWORD=azerothcore123
MYSQL_ROOT_HOST=%
MYSQL_USER=root
# Database names
DB_AUTH_NAME=acore_auth
DB_WORLD_NAME=acore_world
DB_CHARACTERS_NAME=acore_characters
DB_PLAYERBOTS_NAME=acore_playerbots
# Database performance settings
MYSQL_CHARACTER_SET=utf8mb4
MYSQL_COLLATION=utf8mb4_unicode_ci
MYSQL_MAX_CONNECTIONS=1000
MYSQL_INNODB_BUFFER_POOL_SIZE=256M
MYSQL_INNODB_LOG_FILE_SIZE=64M
MYSQL_INNODB_REDO_LOG_CAPACITY=512M
# MySQL tmpfs (RAM disk) for performance
MYSQL_RUNTIME_TMPFS_SIZE=8G
MYSQL_DISABLE_BINLOG=1
# Database connection settings
DB_WAIT_RETRIES=60
DB_WAIT_SLEEP=10
DB_RECONNECT_SECONDS=5
DB_RECONNECT_ATTEMPTS=5
# Database worker threads
DB_LOGIN_WORKER_THREADS=1
DB_WORLD_WORKER_THREADS=1
DB_CHARACTER_WORKER_THREADS=1
DB_LOGIN_SYNCH_THREADS=1
DB_WORLD_SYNCH_THREADS=1
DB_CHARACTER_SYNCH_THREADS=1
# =====================
# Automated Backups
# =====================
BACKUP_RETENTION_DAYS=3
BACKUP_RETENTION_HOURS=6
BACKUP_DAILY_TIME=09:00
BACKUP_INTERVAL_MINUTES=60
BACKUP_EXTRA_DATABASES=
BACKUP_HEALTHCHECK_MAX_MINUTES=1440
BACKUP_HEALTHCHECK_GRACE_SECONDS=4500
# =====================
# Module Configuration (Pre-Built)
# =====================
# These settings tell the system that modules are already built into the images
STACK_IMAGE_MODE=modules
STACK_SOURCE_VARIANT=playerbots
# Key modules enabled (needed for profile detection)
# The RealmMaster profile includes playerbots, so we need this set for deploy.sh to use the correct profile
MODULE_PLAYERBOTS=1
# Note: MODULES_ENABLED_LIST varies by profile - the list below is for the 'realmmaster' profile
# For other profiles, see the corresponding JSON in config/module-profiles/
MODULES_ENABLED_LIST=MODULE_PLAYERBOTS,MODULE_TRANSMOG,MODULE_SOLO_LFG,MODULE_ELUNA,MODULE_AIO,MODULE_NPC_BUFFER,MODULE_NPC_BEASTMASTER,MODULE_SOLOCRAFT,MODULE_1V1_ARENA,MODULE_ACCOUNT_ACHIEVEMENTS,MODULE_ACTIVE_CHAT,MODULE_ARAC,MODULE_ASSISTANT,MODULE_AUTO_REVIVE,MODULE_BLACK_MARKET_AUCTION_HOUSE,MODULE_BOSS_ANNOUNCER,MODULE_BREAKING_NEWS,MODULE_ELUNA_SCRIPTS,MODULE_EVENT_SCRIPTS,MODULE_FIREWORKS,MODULE_GAIN_HONOR_GUARD,MODULE_GLOBAL_CHAT,MODULE_GUILDHOUSE,MODULE_INSTANCE_RESET,MODULE_ITEM_LEVEL_UP,MODULE_LEARN_SPELLS,MODULE_MORPHSUMMON,MODULE_NPC_ENCHANTER,MODULE_NPC_FREE_PROFESSIONS,MODULE_RANDOM_ENCHANTS,MODULE_REAGENT_BANK,MODULE_TIME_IS_TIME
MODULES_CPP_LIST=
MODULES_REQUIRES_CUSTOM_BUILD=0
MODULES_REQUIRES_PLAYERBOT_SOURCE=1
# =====================
# Playerbot Runtime Configuration
# =====================
# Enable/disable playerbots and set population
PLAYERBOT_ENABLED=1
PLAYERBOT_MIN_BOTS=40
PLAYERBOT_MAX_BOTS=200
# =====================
# Client Data
# =====================
# Client data version (auto-detected when blank)
CLIENT_DATA_VERSION=
CLIENT_DATA_PATH=
# =====================
# Server Configuration Preset
# =====================
# Apply a configuration preset during deployment
# Options: none, blizzlike, fast-leveling, hardcore-pvp, casual-pve
SERVER_CONFIG_PRESET=none
# =====================
# Eluna Lua Scripting
# =====================
AC_ELUNA_ENABLED=1
AC_ELUNA_TRACE_BACK=1
AC_ELUNA_AUTO_RELOAD=1
AC_ELUNA_BYTECODE_CACHE=1
AC_ELUNA_SCRIPT_PATH=lua_scripts
AC_ELUNA_REQUIRE_PATHS=
AC_ELUNA_REQUIRE_CPATHS=
AC_ELUNA_AUTO_RELOAD_INTERVAL=1
# =====================
# Container Management
# =====================
CONTAINER_USER=0:0
CONTAINER_DB_IMPORT=ac-db-import
CONTAINER_DB_INIT=ac-db-init
CONTAINER_DB_GUARD=ac-db-guard
CONTAINER_BACKUP=ac-backup
CONTAINER_MODULES=ac-modules
CONTAINER_POST_INSTALL=ac-post-install
# =====================
# Database Guard
# =====================
DB_GUARD_RECHECK_SECONDS=120
DB_GUARD_RETRY_SECONDS=10
DB_GUARD_WAIT_ATTEMPTS=60
DB_GUARD_HEALTH_MAX_AGE=180
DB_GUARD_HEALTHCHECK_INTERVAL=30s
DB_GUARD_HEALTHCHECK_TIMEOUT=10s
DB_GUARD_HEALTHCHECK_RETRIES=5
DB_GUARD_VERIFY_INTERVAL_SECONDS=86400
# =====================
# Health Checks
# =====================
# MySQL health checks
MYSQL_HEALTHCHECK_INTERVAL=20s
MYSQL_HEALTHCHECK_TIMEOUT=15s
MYSQL_HEALTHCHECK_RETRIES=25
MYSQL_HEALTHCHECK_START_PERIOD=120s
# Auth server health checks
AUTH_HEALTHCHECK_INTERVAL=30s
AUTH_HEALTHCHECK_TIMEOUT=10s
AUTH_HEALTHCHECK_RETRIES=3
AUTH_HEALTHCHECK_START_PERIOD=60s
# World server health checks
WORLD_HEALTHCHECK_INTERVAL=30s
WORLD_HEALTHCHECK_TIMEOUT=10s
WORLD_HEALTHCHECK_RETRIES=3
WORLD_HEALTHCHECK_START_PERIOD=120s
# Backup health checks
BACKUP_HEALTHCHECK_INTERVAL=60s
BACKUP_HEALTHCHECK_TIMEOUT=30s
BACKUP_HEALTHCHECK_RETRIES=3
BACKUP_HEALTHCHECK_START_PERIOD=120s
# =====================
# Management Tools
# =====================
# phpMyAdmin configuration
PMA_HOST=ac-mysql
PMA_PORT=3306
PMA_USER=root
PMA_ARBITRARY=1
PMA_ABSOLUTE_URI=
PMA_UPLOAD_LIMIT=300M
PMA_MEMORY_LIMIT=512M
PMA_MAX_EXECUTION_TIME=600
# Keira3 configuration
KEIRA_DATABASE_HOST=ac-mysql
KEIRA_DATABASE_PORT=3306
# =====================
# Compose Overrides
# =====================
# Enable optional compose overrides (set to 1 to enable)
COMPOSE_OVERRIDE_MYSQL_EXPOSE_ENABLED=0
COMPOSE_OVERRIDE_WORLDSERVER_DEBUG_LOGGING_ENABLED=0
# =====================
# DO NOT MODIFY BELOW (Build-related, not used with pre-built images)
# =====================
AUTO_REBUILD_ON_DEPLOY=0
DB_UPDATES_ALLOWED_MODULES=all
DB_UPDATES_REDUNDANCY=1
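A quick, hedged way to sanity-check the interpolated image references before running `./deploy.sh` (assumes Docker Compose v2 picks up the copied `.env` from the project directory):

```bash
cp .env.prebuilt .env                    # as in the Quick Start above
docker compose config | grep 'image:'   # print the fully resolved image names
```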


@@ -14,7 +14,7 @@ COMPOSE_OVERRIDE_WORLDSERVER_DEBUG_LOGGING_ENABLED=0
# Project name
# =====================
# Customize this to match your deployment slug (used for container names/tags)
-COMPOSE_PROJECT_NAME=azerothcore-stack
+COMPOSE_PROJECT_NAME=azerothcore-realmmaster
# =====================
# Storage & Timezone
@@ -76,10 +76,17 @@ DB_GUARD_VERIFY_INTERVAL_SECONDS=86400
# =====================
STAGE_PATH_MODULE_SQL=${STORAGE_MODULE_SQL_PATH}
# =====================
# Modules rebuild source path
# =====================
# Default AzerothCore source checkout used for module rebuilds
MODULES_REBUILD_SOURCE_PATH=${STORAGE_PATH_LOCAL}/source/azerothcore
# =====================
# SQL Source Overlay
# =====================
-AC_SQL_SOURCE_PATH=${STORAGE_LOCAL_SOURCE_PATH}/azerothcore-playerbots/data/sql
+SOURCE_DIR=${MODULES_REBUILD_SOURCE_PATH}
AC_SQL_SOURCE_PATH=${MODULES_REBUILD_SOURCE_PATH}/data/sql
# =====================
# Images
@@ -89,15 +96,15 @@ AC_DB_IMPORT_IMAGE=acore/ac-wotlk-db-import:master
AC_AUTHSERVER_IMAGE=acore/ac-wotlk-authserver:master
AC_WORLDSERVER_IMAGE=acore/ac-wotlk-worldserver:master
# Services (Playerbots)
-AC_AUTHSERVER_IMAGE_PLAYERBOTS=azerothcore-realmmaster:authserver-playerbots
+AC_AUTHSERVER_IMAGE_PLAYERBOTS=${COMPOSE_PROJECT_NAME}:authserver-playerbots
-AC_WORLDSERVER_IMAGE_PLAYERBOTS=azerothcore-realmmaster:worldserver-playerbots
+AC_WORLDSERVER_IMAGE_PLAYERBOTS=${COMPOSE_PROJECT_NAME}:worldserver-playerbots
# Services (Module Build Tags)
# Images used during module compilation and tagging
-AC_AUTHSERVER_IMAGE_MODULES=azerothcore-realmmaster:authserver-modules-latest
+AC_AUTHSERVER_IMAGE_MODULES=${COMPOSE_PROJECT_NAME}:authserver-modules-latest
-AC_WORLDSERVER_IMAGE_MODULES=azerothcore-realmmaster:worldserver-modules-latest
+AC_WORLDSERVER_IMAGE_MODULES=${COMPOSE_PROJECT_NAME}:worldserver-modules-latest
# Client Data
AC_CLIENT_DATA_IMAGE=acore/ac-wotlk-client-data:master
-AC_CLIENT_DATA_IMAGE_PLAYERBOTS=uprightbass360/azerothcore-wotlk-playerbots:client-data-Playerbot
+AC_CLIENT_DATA_IMAGE_PLAYERBOTS=${COMPOSE_PROJECT_NAME}:client-data-playerbots
# Build artifacts
DOCKER_IMAGE_TAG=master
AC_AUTHSERVER_IMAGE_BASE=acore/ac-wotlk-authserver
@@ -148,6 +155,7 @@ MYSQL_MAX_CONNECTIONS=1000
MYSQL_INNODB_BUFFER_POOL_SIZE=256M
MYSQL_INNODB_LOG_FILE_SIZE=64M
MYSQL_INNODB_REDO_LOG_CAPACITY=512M
# MySQL runs on tmpfs (RAM) for performance, with sync to persistent storage on shutdown
MYSQL_RUNTIME_TMPFS_SIZE=8G
MYSQL_DISABLE_BINLOG=1
MYSQL_CONFIG_DIR=${STORAGE_CONFIG_PATH}/mysql/conf.d
@@ -219,6 +227,8 @@ MODULES_REQUIRES_PLAYERBOT_SOURCE=0
# Only set this if you need to override the auto-detected version
# Example: v18.0, v17.0, etc.
CLIENT_DATA_VERSION=
# Client data path for deployment (auto-calculated when left blank)
CLIENT_DATA_PATH=
# =====================
# Server Configuration
@@ -228,174 +238,10 @@ CLIENT_DATA_VERSION=
SERVER_CONFIG_PRESET=none
CLIENT_DATA_CACHE_PATH=${STORAGE_PATH_LOCAL}/client-data-cache
# =====================
# Module toggles (0/1)
# =====================
# Enable/disable modules by setting to 1 (enabled) or 0 (disabled)
# Modules are organized by category for easier navigation
# 🤖 Automation
# Playerbot and AI systems
MODULE_NPCBOT_EXTENDED_COMMANDS=0
MODULE_OLLAMA_CHAT=0
# mod-playerbots: Installs SQL/config assets; core functionality is built into playerbot images
MODULE_PLAYERBOTS=0
MODULE_PLAYER_BOT_LEVEL_BRACKETS=0
# ✨ Quality of Life
# Convenience features that improve gameplay experience
MODULE_AOE_LOOT=0
MODULE_AUTO_REVIVE=0
MODULE_FIREWORKS=0
MODULE_INSTANCE_RESET=0
MODULE_LEARN_SPELLS=0
MODULE_SOLO_LFG=0
# ⚔️ Gameplay Enhancement
# Core gameplay improvements and mechanics
MODULE_AUTOBALANCE=0
MODULE_CHALLENGE_MODES=0
MODULE_DUEL_RESET=0
MODULE_DUNGEON_RESPAWN=0
MODULE_HARDCORE_MODE=0
MODULE_HORADRIC_CUBE=0
MODULE_SOLOCRAFT=0
MODULE_STATBOOSTER=0
MODULE_TIME_IS_TIME=0
# 🏪 NPC Services
# Service NPCs that provide player utilities
MODULE_ASSISTANT=0
MODULE_MULTIVENDOR=0
MODULE_NPC_BEASTMASTER=0
MODULE_NPC_BUFFER=0
MODULE_NPC_ENCHANTER=0
MODULE_NPC_FREE_PROFESSIONS=0
# mod-npc-talent-template: Admin commands: .templatenpc create [TemplateName] and .templatenpc reload
MODULE_NPC_TALENT_TEMPLATE=0
MODULE_REAGENT_BANK=0
MODULE_TRANSMOG=0
# ⚡ PvP
# Player vs Player focused modules
MODULE_1V1_ARENA=0
# mod-arena-replay: NPC ID: 98500; known issue: players who were participants experience unusual behavior when watching their own replay
MODULE_ARENA_REPLAY=0
MODULE_GAIN_HONOR_GUARD=0
MODULE_PHASED_DUELS=0
MODULE_PVP_TITLES=0
MODULE_ULTIMATE_FULL_LOOT_PVP=0
# 📈 Progression
# Character and server progression systems
MODULE_DYNAMIC_XP=0
MODULE_INDIVIDUAL_PROGRESSION=0
MODULE_ITEM_LEVEL_UP=0
MODULE_LEVEL_GRANT=0
# mod-progression-system: SQL files cannot be unloaded once executed; requires auto DB updater enabled in worldserver config
MODULE_PROGRESSION_SYSTEM=0
MODULE_PROMOTION_AZEROTHCORE=0
MODULE_WEEKEND_XP=0
# mod-zone-difficulty: Mythicmode NPC 1128001 spawned in raids/heroic dungeons; NPC 1128002 for Mythicmode rewards
MODULE_ZONE_DIFFICULTY=0
# 💰 Economy
# Auction house, trading, and economic systems
MODULE_AHBOT=0
MODULE_BLACK_MARKET_AUCTION_HOUSE=0
MODULE_DYNAMIC_TRADER=0
MODULE_EXCHANGE_NPC=0
MODULE_GLOBAL_MAIL_BANKING_AUCTIONS=0
MODULE_LOTTERY_LUA=0
MODULE_LUA_AH_BOT=0
MODULE_RANDOM_ENCHANTS=0
# 👥 Social
# Social and community features
MODULE_ACTIVE_CHAT=0
MODULE_BOSS_ANNOUNCER=0
MODULE_BREAKING_NEWS=0
MODULE_DISCORD_NOTIFIER=0
MODULE_GLOBAL_CHAT=0
MODULE_TEMP_ANNOUNCEMENTS=0
# 👤 Account-Wide
# Features that apply across all characters on an account
MODULE_ACCOUNTWIDE_SYSTEMS=0
MODULE_ACCOUNT_ACHIEVEMENTS=0
MODULE_ACCOUNT_MOUNTS=0
# 🎨 Customization
# Character and appearance customization
MODULE_ARAC=0
# mod-morphsummon: Allows customization of summoned creature appearances (Warlock demons, Death Knight ghouls, Mage water elementals); NPC ID: 601072
MODULE_MORPHSUMMON=0
MODULE_TRANSMOG_AIO=0
MODULE_WORGOBLIN=0
# 📜 Scripting
# Lua/Eluna scripting frameworks and tools
# mod-aio: Azeroth Interface Override - enables client-server interface communication
MODULE_AIO=0
MODULE_ELUNA=1
MODULE_ELUNA_SCRIPTS=0
MODULE_ELUNA_TS=0
MODULE_EVENT_SCRIPTS=0
# 🔧 Admin Tools
# Server administration and management utilities
MODULE_ANTIFARMING=0
MODULE_CARBON_COPY=0
# mod-keep-out: Requires editing database table mod_mko_map_lock; use .gps command to obtain map and zone IDs
MODULE_KEEP_OUT=0
MODULE_SEND_AND_BIND=0
MODULE_SERVER_AUTO_SHUTDOWN=0
# mod-spell-regulator: WARNING: Custom code changes mandatory before module functions; requires custom hooks from external gist
MODULE_SPELL_REGULATOR=0
MODULE_WHO_LOGGED=0
MODULE_ZONE_CHECK=0
# 💎 Premium/VIP
# Premium account and VIP systems
MODULE_ACORE_SUBSCRIPTIONS=0
# mod-premium: Script must be assigned to an item (like hearthstone) using script name 'premium_account'
MODULE_PREMIUM=0
MODULE_SYSTEM_VIP=0
# 🎮 Mini-Games
# Fun and entertainment features
MODULE_AIO_BLACKJACK=0
MODULE_POCKET_PORTAL=0
# mod-tic-tac-toe: NPC ID: 100155
MODULE_TIC_TAC_TOE=0
# 🏰 Content
# Additional game content and features
MODULE_AZEROTHSHARD=0
MODULE_BG_SLAVERYVALLEY=0
MODULE_GUILDHOUSE=0
MODULE_TREASURE_CHEST_SYSTEM=0
MODULE_WAR_EFFORT=0
# 🎁 Rewards
# Player reward and incentive systems
MODULE_LEVEL_UP_REWARD=0
MODULE_PRESTIGE_DRAFT_MODE=0
MODULE_RECRUIT_A_FRIEND=0
# mod-resurrection-scroll: Requires EnablePlayerSettings to be enabled in worldserver config file
MODULE_RESURRECTION_SCROLL=0
MODULE_REWARD_PLAYED_TIME=0
# 🛠️ Developer Tools
# Development and testing utilities
MODULE_SKELETON_MODULE=0
# =====================
# Rebuild automation
# =====================
AUTO_REBUILD_ON_DEPLOY=0
# Default AzerothCore source checkout used for module rebuilds
MODULES_REBUILD_SOURCE_PATH=${STORAGE_PATH_LOCAL}/source/azerothcore
# =====================
# Source repositories
@@ -452,39 +298,111 @@ KEIRA_DATABASE_HOST=ac-mysql
KEIRA_DATABASE_PORT=3306
# Auto-generated defaults for new modules
MODULE_NPCBOT_EXTENDED_COMMANDS=0
MODULE_OLLAMA_CHAT=0
MODULE_PLAYERBOTS=0
MODULE_PLAYER_BOT_LEVEL_BRACKETS=0
MODULE_AOE_LOOT=0
MODULE_AUTO_REVIVE=0
MODULE_FIREWORKS=0
MODULE_INSTANCE_RESET=0
MODULE_LEARN_SPELLS=0
MODULE_SOLO_LFG=0
MODULE_AUTOBALANCE=0
MODULE_DUEL_RESET=0
MODULE_HARDCORE_MODE=0
MODULE_HORADRIC_CUBE=0
MODULE_SOLOCRAFT=0
MODULE_TIME_IS_TIME=0
MODULE_ASSISTANT=0
MODULE_NPC_BEASTMASTER=0
MODULE_NPC_BUFFER=0
MODULE_NPC_ENCHANTER=0
MODULE_NPC_FREE_PROFESSIONS=0
MODULE_NPC_TALENT_TEMPLATE=0
MODULE_REAGENT_BANK=0
MODULE_TRANSMOG=0
MODULE_1V1_ARENA=0
MODULE_ARENA_REPLAY=0
MODULE_GAIN_HONOR_GUARD=0
MODULE_PHASED_DUELS=0
MODULE_PVP_TITLES=0
MODULE_ULTIMATE_FULL_LOOT_PVP=0
MODULE_DYNAMIC_XP=0
MODULE_INDIVIDUAL_PROGRESSION=0
MODULE_ITEM_LEVEL_UP=0
MODULE_PROGRESSION_SYSTEM=0
MODULE_PROMOTION_AZEROTHCORE=0
MODULE_WEEKEND_XP=0
MODULE_ZONE_DIFFICULTY=0
MODULE_DYNAMIC_TRADER=0
MODULE_EXCHANGE_NPC=0
MODULE_GLOBAL_MAIL_BANKING_AUCTIONS=0
MODULE_LOTTERY_LUA=0
MODULE_LUA_AH_BOT=0
MODULE_RANDOM_ENCHANTS=0
MODULE_ACTIVE_CHAT=0
MODULE_BOSS_ANNOUNCER=0
MODULE_BREAKING_NEWS=0
MODULE_DISCORD_NOTIFIER=0
MODULE_GLOBAL_CHAT=0
MODULE_TEMP_ANNOUNCEMENTS=0
MODULE_ACCOUNTWIDE_SYSTEMS=0
MODULE_ACCOUNT_ACHIEVEMENTS=0
MODULE_ACCOUNT_MOUNTS=0
MODULE_ARAC=0
MODULE_MORPHSUMMON=0
MODULE_TRANSMOG_AIO=0
MODULE_WORGOBLIN=0
MODULE_AIO=0
MODULE_ELUNA=1
MODULE_ELUNA_SCRIPTS=0
MODULE_ELUNA_TS=0
MODULE_EVENT_SCRIPTS=0
MODULE_ANTIFARMING=0
MODULE_CARBON_COPY=0
MODULE_KEEP_OUT=0
MODULE_SEND_AND_BIND=0
MODULE_SERVER_AUTO_SHUTDOWN=0
MODULE_SPELL_REGULATOR=0
MODULE_WHO_LOGGED=0
MODULE_ZONE_CHECK=0
MODULE_PREMIUM=0
MODULE_SYSTEM_VIP=0
MODULE_AIO_BLACKJACK=0
MODULE_TIC_TAC_TOE=0
MODULE_BG_SLAVERYVALLEY=0
MODULE_GUILDHOUSE=0
MODULE_TREASURE_CHEST_SYSTEM=0
MODULE_WAR_EFFORT=0
MODULE_LEVEL_UP_REWARD=0
MODULE_PRESTIGE_DRAFT_MODE=0
MODULE_RECRUIT_A_FRIEND=0
MODULE_RESURRECTION_SCROLL=0
MODULE_REWARD_PLAYED_TIME=0
MODULE_SKELETON_MODULE=0
MODULE_1V1_PVP_SYSTEM=0 MODULE_1V1_PVP_SYSTEM=0
MODULE_ACI=0
MODULE_ACORE_API=0
MODULE_ACORE_BG_END_ANNOUNCER=0 MODULE_ACORE_BG_END_ANNOUNCER=0
MODULE_ACORE_BOX=0 MODULE_ACORE_BOX=0
MODULE_ACORE_CLIENT=0
MODULE_ACORE_CMS=0
MODULE_ACORE_ELUNATEST=0 MODULE_ACORE_ELUNATEST=0
MODULE_ACORE_LINUX_RESTARTER=0 MODULE_ACORE_LINUX_RESTARTER=0
MODULE_ACORE_LUA_UNLIMITED_AMMO=0 MODULE_ACORE_LUA_UNLIMITED_AMMO=0
MODULE_ACORE_LXD_IMAGE=0 MODULE_ACORE_LXD_IMAGE=0
MODULE_ACORE_MALL=0 MODULE_ACORE_MALL=0
MODULE_ACORE_MINI_REG_PAGE=0 MODULE_ACORE_MINI_REG_PAGE=0
MODULE_ACORE_NODE_SERVER=0
MODULE_ACORE_PWA=0
MODULE_ACORE_SOD=0 MODULE_ACORE_SOD=0
MODULE_ACORE_SUMMONALL=0 MODULE_ACORE_SUMMONALL=0
MODULE_ACORE_TILEMAP=0
MODULE_ACORE_ZONEDEBUFF=0 MODULE_ACORE_ZONEDEBUFF=0
MODULE_ACREBUILD=0 MODULE_ACREBUILD=0
MODULE_ADDON_FACTION_FREE_UNIT_POPUP=0 MODULE_ADDON_FACTION_FREE_UNIT_POPUP=0
MODULE_AOE_LOOT_MERGE=0 MODULE_AOE_LOOT_MERGE=0
MODULE_APAW=0
MODULE_ARENA_SPECTATOR=0 MODULE_ARENA_SPECTATOR=0
MODULE_ARENA_STATS=0
MODULE_ATTRIBOOST=0
MODULE_AUTO_CHECK_RESTART=0 MODULE_AUTO_CHECK_RESTART=0
MODULE_AZEROTHCOREADMIN=0 MODULE_AZEROTHCOREADMIN=0
MODULE_AZEROTHCOREDISCORDBOT=0 MODULE_AZEROTHCOREDISCORDBOT=0
MODULE_AZEROTHCORE_ADDITIONS=0 MODULE_AZEROTHCORE_ADDITIONS=0
MODULE_AZEROTHCORE_ALL_STACKABLES_200=0 MODULE_AZEROTHCORE_ALL_STACKABLES_200=0
MODULE_AZEROTHCORE_ANSIBLE=0 MODULE_AZEROTHCORE_ANSIBLE=0
MODULE_AZEROTHCORE_ARMORY=0
MODULE_AZEROTHCORE_LUA_ARENA_MASTER_COMMAND=0 MODULE_AZEROTHCORE_LUA_ARENA_MASTER_COMMAND=0
MODULE_AZEROTHCORE_LUA_DEMON_MORPHER=0 MODULE_AZEROTHCORE_LUA_DEMON_MORPHER=0
MODULE_AZEROTHCORE_PASSRESET=0 MODULE_AZEROTHCORE_PASSRESET=0
@@ -494,41 +412,25 @@ MODULE_AZEROTHCORE_TRIVIA_SYSTEM=0
MODULE_AZEROTHCORE_WEBSITE=0 MODULE_AZEROTHCORE_WEBSITE=0
MODULE_AZEROTHCORE_WOWHEAD_MOD_LUA=0 MODULE_AZEROTHCORE_WOWHEAD_MOD_LUA=0
MODULE_AZTRAL_AIRLINES=0 MODULE_AZTRAL_AIRLINES=0
MODULE_BGQUEUECHECKER=0
MODULE_BG_QUEUE_ABUSER_VIEWER=0
MODULE_BLIZZLIKE_TELES=0 MODULE_BLIZZLIKE_TELES=0
MODULE_BREAKINGNEWSOVERRIDE=0
MODULE_CLASSIC_MODE=0 MODULE_CLASSIC_MODE=0
MODULE_CODEBASE=0 MODULE_CODEBASE=0
MODULE_CONFIG_RATES=0 MODULE_CONFIG_RATES=0
MODULE_DEVJOESTAR=0 MODULE_DEVJOESTAR=0
MODULE_ELUNA_WOW_SCRIPTS=0
MODULE_EXTENDEDXP=0
MODULE_EXTENDED_HOLIDAYS_LUA=0 MODULE_EXTENDED_HOLIDAYS_LUA=0
MODULE_FFAFIX=0
MODULE_FLAG_CHECKER=0 MODULE_FLAG_CHECKER=0
MODULE_GUILDBANKTABFEEFIXER=0 MODULE_GUILDBANKTABFEEFIXER=0
MODULE_HARDMODE=0
MODULE_HEARTHSTONE_COOLDOWNS=0 MODULE_HEARTHSTONE_COOLDOWNS=0
MODULE_ITEMBROADCASTGUILDCHAT=0
MODULE_KARGATUM_SYSTEM=0
MODULE_KEIRA3=0 MODULE_KEIRA3=0
MODULE_LOTTERY_CHANCE_INSTANT=0 MODULE_LOTTERY_CHANCE_INSTANT=0
MODULE_LUA_AIO_MODRATE_EXP=0 MODULE_LUA_AIO_MODRATE_EXP=0
MODULE_LUA_COMMAND_PLUS=0 MODULE_LUA_COMMAND_PLUS=0
MODULE_LUA_ITEMUPGRADER_TEMPLATE=0 MODULE_LUA_ITEMUPGRADER_TEMPLATE=0
MODULE_LUA_NOTONLY_RANDOMMORPHER=0
MODULE_LUA_PARAGON_ANNIVERSARY=0
MODULE_LUA_PVP_TITLES_RANKING_SYSTEM=0 MODULE_LUA_PVP_TITLES_RANKING_SYSTEM=0
MODULE_LUA_SCRIPTS=0 MODULE_LUA_SCRIPTS=0
MODULE_LUA_SUPER_BUFFERNPC=0
MODULE_LUA_VIP=0 MODULE_LUA_VIP=0
MODULE_MOD_ACCOUNTBOUND=0
MODULE_MOD_ACCOUNT_VANITY_PETS=0 MODULE_MOD_ACCOUNT_VANITY_PETS=0
MODULE_MOD_ACTIVATEZONES=0
MODULE_MOD_AH_BOT_PLUS=0 MODULE_MOD_AH_BOT_PLUS=0
MODULE_MOD_ALPHA_REWARDS=0
MODULE_MOD_AOE_LOOT=0
MODULE_MOD_APPRECIATION=0 MODULE_MOD_APPRECIATION=0
MODULE_MOD_ARENA_TIGERSPEAK=0 MODULE_MOD_ARENA_TIGERSPEAK=0
MODULE_MOD_ARENA_TOLVIRON=0 MODULE_MOD_ARENA_TOLVIRON=0
@@ -539,44 +441,29 @@ MODULE_MOD_BG_ITEM_REWARD=0
MODULE_MOD_BG_REWARD=0 MODULE_MOD_BG_REWARD=0
MODULE_MOD_BG_TWINPEAKS=0 MODULE_MOD_BG_TWINPEAKS=0
MODULE_MOD_BIENVENIDA=0 MODULE_MOD_BIENVENIDA=0
MODULE_MOD_BLACK_MARKET=0
MODULE_MOD_BRAWLERS_GUILD=0
MODULE_MOD_BUFF_COMMAND=0 MODULE_MOD_BUFF_COMMAND=0
MODULE_MOD_CFPVE=0 MODULE_MOD_CFPVE=0
MODULE_MOD_CHANGEABLESPAWNRATES=0
MODULE_MOD_CHARACTER_SERVICES=0 MODULE_MOD_CHARACTER_SERVICES=0
MODULE_MOD_CHARACTER_TOOLS=0
MODULE_MOD_CHAT_TRANSMITTER=0 MODULE_MOD_CHAT_TRANSMITTER=0
MODULE_MOD_CHROMIE_XP=0 MODULE_MOD_CHROMIE_XP=0
MODULE_MOD_CONGRATS_ON_LEVEL=0 MODULE_MOD_CONGRATS_ON_LEVEL=0
MODULE_MOD_COSTUMES=0 MODULE_MOD_COSTUMES=0
MODULE_MOD_CRAFTSPEED=0 MODULE_MOD_CRAFTSPEED=0
MODULE_MOD_CTA_SWITCH=0 MODULE_MOD_CTA_SWITCH=0
MODULE_MOD_DEAD_MEANS_DEAD=0
MODULE_MOD_DEATHROLL_AIO=0 MODULE_MOD_DEATHROLL_AIO=0
MODULE_MOD_DEMONIC_PACT_CLASSIC=0 MODULE_MOD_DEMONIC_PACT_CLASSIC=0
MODULE_MOD_DESERTION_WARNINGS=0 MODULE_MOD_DESERTION_WARNINGS=0
MODULE_MOD_DISCORD_ANNOUNCE=0 MODULE_MOD_DISCORD_ANNOUNCE=0
MODULE_MOD_DISCORD_WEBHOOK=0
MODULE_MOD_DMF_SWITCH=0 MODULE_MOD_DMF_SWITCH=0
MODULE_MOD_DUNGEONMASTER=0 MODULE_MOD_DUNGEONMASTER=0
MODULE_MOD_DUNGEON_SCALE=0
MODULE_MOD_DYNAMIC_LOOT_RATES=0
MODULE_MOD_DYNAMIC_RESURRECTIONS=0
MODULE_MOD_ENCOUNTER_LOGS=0
MODULE_MOD_FACTION_FREE=0 MODULE_MOD_FACTION_FREE=0
MODULE_MOD_FIRSTLOGIN_AIO=0
MODULE_MOD_FLIGHTMASTER_WHISTLE=0 MODULE_MOD_FLIGHTMASTER_WHISTLE=0
MODULE_MOD_FORTIS_AUTOBALANCE=0 MODULE_MOD_FORTIS_AUTOBALANCE=0
MODULE_MOD_GAME_STATE_API=0
MODULE_MOD_GEDDON_BINDING_SHARD=0 MODULE_MOD_GEDDON_BINDING_SHARD=0
MODULE_MOD_GHOST_SPEED=0 MODULE_MOD_GHOST_SPEED=0
MODULE_MOD_GLOBALCHAT=0
MODULE_MOD_GM_COMMANDS=0 MODULE_MOD_GM_COMMANDS=0
MODULE_MOD_GOMOVE=0
MODULE_MOD_GROWNUP=0 MODULE_MOD_GROWNUP=0
MODULE_MOD_GUILDFUNDS=0 MODULE_MOD_GUILDFUNDS=0
MODULE_MOD_GUILD_VILLAGE=0
MODULE_MOD_GUILD_ZONE_SYSTEM=0 MODULE_MOD_GUILD_ZONE_SYSTEM=0
MODULE_MOD_HARDCORE=0 MODULE_MOD_HARDCORE=0
MODULE_MOD_HARDCORE_MAKGORA=0 MODULE_MOD_HARDCORE_MAKGORA=0
@@ -585,32 +472,21 @@ MODULE_MOD_HIGH_RISK_SYSTEM=0
MODULE_MOD_HUNTER_PET_STORAGE=0 MODULE_MOD_HUNTER_PET_STORAGE=0
MODULE_MOD_IMPROVED_BANK=0 MODULE_MOD_IMPROVED_BANK=0
MODULE_MOD_INCREMENT_CACHE_VERSION=0 MODULE_MOD_INCREMENT_CACHE_VERSION=0
MODULE_MOD_INDIVIDUAL_XP=0
MODULE_MOD_INFLUXDB=0
MODULE_MOD_INSTANCE_TOOLS=0
MODULE_MOD_IP2NATION=0 MODULE_MOD_IP2NATION=0
MODULE_MOD_IP_TRACKER=0 MODULE_MOD_IP_TRACKER=0
MODULE_MOD_ITEMLEVEL=0
MODULE_MOD_ITEM_UPGRADE=0 MODULE_MOD_ITEM_UPGRADE=0
MODULE_MOD_JUNK_TO_GOLD=0 MODULE_MOD_JUNK_TO_GOLD=0
MODULE_MOD_LEARNSPELLS=0 MODULE_MOD_LEARNSPELLS=0
MODULE_MOD_LEECH=0
MODULE_MOD_LEVEL_15_BOOST=0
MODULE_MOD_LEVEL_ONE_MOUNTS=0 MODULE_MOD_LEVEL_ONE_MOUNTS=0
MODULE_MOD_LEVEL_REWARDS=0
MODULE_MOD_LOGIN_REWARDS=0
MODULE_MOD_LOW_LEVEL_ARENA=0 MODULE_MOD_LOW_LEVEL_ARENA=0
MODULE_MOD_LOW_LEVEL_RBG=0 MODULE_MOD_LOW_LEVEL_RBG=0
MODULE_MOD_MISSING_OBJECTIVES=0 MODULE_MOD_MISSING_OBJECTIVES=0
MODULE_MOD_MONEY_FOR_KILLS=0 MODULE_MOD_MONEY_FOR_KILLS=0
MODULE_MOD_MOUNTS_ON_ACCOUNT=0 MODULE_MOD_MOUNTS_ON_ACCOUNT=0
MODULE_MOD_MOUNT_REQUIREMENTS=0 MODULE_MOD_MOUNT_REQUIREMENTS=0
MODULE_MOD_MULTI_VENDOR=0
MODULE_MOD_MYTHIC_PLUS=0 MODULE_MOD_MYTHIC_PLUS=0
MODULE_MOD_NOCLIP=0
MODULE_MOD_NORDF=0 MODULE_MOD_NORDF=0
MODULE_MOD_NOTIFY_MUTED=0 MODULE_MOD_NOTIFY_MUTED=0
MODULE_MOD_NO_FARMING=0
MODULE_MOD_NO_HEARTHSTONE_COOLDOWN=0 MODULE_MOD_NO_HEARTHSTONE_COOLDOWN=0
MODULE_MOD_NPC_ALL_MOUNTS=0 MODULE_MOD_NPC_ALL_MOUNTS=0
MODULE_MOD_NPC_CODEBOX=0 MODULE_MOD_NPC_CODEBOX=0
@@ -620,90 +496,66 @@ MODULE_MOD_NPC_PROMOTION=0
MODULE_MOD_NPC_SERVICES=0 MODULE_MOD_NPC_SERVICES=0
MODULE_MOD_NPC_SPECTATOR=0 MODULE_MOD_NPC_SPECTATOR=0
MODULE_MOD_NPC_SUBCLASS=0 MODULE_MOD_NPC_SUBCLASS=0
MODULE_MOD_OBJSCALE=0
MODULE_MOD_OLLAMA_BOT_BUDDY=0 MODULE_MOD_OLLAMA_BOT_BUDDY=0
MODULE_MOD_ONY_NAXX_LOGOUT_TELEPORT=0 MODULE_MOD_ONY_NAXX_LOGOUT_TELEPORT=0
MODULE_MOD_PEACEKEEPER=0 MODULE_MOD_PEACEKEEPER=0
MODULE_MOD_PETEQUIP=0 MODULE_MOD_PETEQUIP=0
MODULE_MOD_PREMIUM=0
MODULE_MOD_PREMIUM_LIB=0
MODULE_MOD_PROFESSION_EXPERIENCE=0 MODULE_MOD_PROFESSION_EXPERIENCE=0
MODULE_MOD_PROFSPECS=0
MODULE_MOD_PTR_TEMPLATE=0 MODULE_MOD_PTR_TEMPLATE=0
MODULE_MOD_PVPSCRIPT=0
MODULE_MOD_PVPSTATS_ANNOUNCER=0 MODULE_MOD_PVPSTATS_ANNOUNCER=0
MODULE_MOD_PVP_ZONES=0 MODULE_MOD_PVP_ZONES=0
MODULE_MOD_QUEST_LOOT_PARTY=0 MODULE_MOD_QUEST_LOOT_PARTY=0
MODULE_MOD_QUEST_STATUS=0
MODULE_MOD_QUEUE_LIST_CACHE=0 MODULE_MOD_QUEUE_LIST_CACHE=0
MODULE_MOD_QUICKBALANCE=0
MODULE_MOD_QUICK_RESPAWN=0 MODULE_MOD_QUICK_RESPAWN=0
MODULE_MOD_RACIAL_TRAIT_SWAP=0 MODULE_MOD_RACIAL_TRAIT_SWAP=0
MODULE_MOD_RARE_DROPS=0
MODULE_MOD_RDF_EXPANSION=0 MODULE_MOD_RDF_EXPANSION=0
MODULE_MOD_REAL_ONLINE=0 MODULE_MOD_REAL_ONLINE=0
MODULE_MOD_RECRUIT_FRIEND=0 MODULE_MOD_RECRUIT_FRIEND=0
MODULE_MOD_REFORGING=0 MODULE_MOD_REFORGING=0
MODULE_MOD_RESET_RAID_COOLDOWNS=0 MODULE_MOD_RESET_RAID_COOLDOWNS=0
MODULE_MOD_REWARD_PLAYED_TIME_IMPROVED=0 MODULE_MOD_REWARD_PLAYED_TIME_IMPROVED=0
MODULE_MOD_REWARD_SHOP=0
MODULE_MOD_SELL_ITEMS=0 MODULE_MOD_SELL_ITEMS=0
MODULE_MOD_SETXPBAR=0 MODULE_MOD_SETXPBAR=0
MODULE_MOD_SHARE_MOUNTS=0
MODULE_MOD_SPAWNPOINTS=0
MODULE_MOD_SPEC_REWARD=0
MODULE_MOD_SPELLREGULATOR=0
MODULE_MOD_SPONSORSHIP=0
MODULE_MOD_STARTER_GUILD=0 MODULE_MOD_STARTER_GUILD=0
MODULE_MOD_STARTER_WANDS=0 MODULE_MOD_STARTER_WANDS=0
MODULE_MOD_STARTING_PET=0
MODULE_MOD_STREAMS=0 MODULE_MOD_STREAMS=0
MODULE_MOD_SWIFT_TRAVEL_FORM=0 MODULE_MOD_SWIFT_TRAVEL_FORM=0
MODULE_MOD_TALENTBUTTON=0 MODULE_MOD_TALENTBUTTON=0
MODULE_MOD_TRADE_ITEMS_FILTER=0
MODULE_MOD_TREASURE=0 MODULE_MOD_TREASURE=0
MODULE_MOD_TRIAL_OF_FINALITY=0
MODULE_MOD_VANILLA_NAXXRAMAS=0 MODULE_MOD_VANILLA_NAXXRAMAS=0
MODULE_MOD_WARLOCK_PET_RENAME=0 MODULE_MOD_WARLOCK_PET_RENAME=0
MODULE_MOD_WEAPON_VISUAL=0 MODULE_MOD_WEAPON_VISUAL=0
MODULE_MOD_WEEKENDBONUS=0 MODULE_MOD_WEEKENDBONUS=0
MODULE_MOD_WEEKEND_XP=0 MODULE_MOD_WEEKEND_XP=0
MODULE_MOD_WHOLOGGED=0
MODULE_MORZA_ISLAND_ARAXIA_SERVER=0 MODULE_MORZA_ISLAND_ARAXIA_SERVER=0
MODULE_MPQ_TOOLS_OSX=0 MODULE_MPQ_TOOLS_OSX=0
MODULE_MYSQL_TOOLS=0 MODULE_MYSQL_TOOLS=0
MODULE_NODEROUTER=0
MODULE_OPENPROJECTS=0 MODULE_OPENPROJECTS=0
MODULE_PLAYERTELEPORT=0
MODULE_PORTALS_IN_ALL_CAPITALS=0 MODULE_PORTALS_IN_ALL_CAPITALS=0
MODULE_PRESTIGE=0
MODULE_PRESTIGIOUS=0
MODULE_PVPSTATS=0 MODULE_PVPSTATS=0
MODULE_RAIDTELEPORTER=0
MODULE_RECACHE=0 MODULE_RECACHE=0
MODULE_RECYCLEDITEMS=0
MODULE_REWARD_SYSTEM=0
MODULE_SAHTOUTCMS=0 MODULE_SAHTOUTCMS=0
MODULE_SERVER_STATUS=0
MODULE_SETXPBAR=0 MODULE_SETXPBAR=0
MODULE_SPELLSCRIPT_REFACTOR_TOOL=0 MODULE_SPELLSCRIPT_REFACTOR_TOOL=0
MODULE_SQL_NPC_TELEPORTER=0 MODULE_SQL_NPC_TELEPORTER=0
MODULE_STATBOOSTERREROLLER=0
MODULE_STRAPI_AZEROTHCORE=0 MODULE_STRAPI_AZEROTHCORE=0
MODULE_TBC_RAID_HP_RESTORATION=0 MODULE_TBC_RAID_HP_RESTORATION=0
MODULE_TELEGRAM_AUTOMATED_DB_BACKUP=0 MODULE_TELEGRAM_AUTOMATED_DB_BACKUP=0
MODULE_TOOL_TC_MIGRATION=0 MODULE_TOOL_TC_MIGRATION=0
MODULE_TRANSMOG_ADDONS=0 MODULE_TRANSMOG_ADDONS=0
MODULE_UPDATE_MOB_LEVEL_TO_PLAYER_AND_RANDOM_ITEM_STATS=0
MODULE_UPDATE_MODULE_CONFS=0 MODULE_UPDATE_MODULE_CONFS=0
MODULE_WEB_CHARACTER_MIGRATION_TOOL=0 MODULE_WEB_CHARACTER_MIGRATION_TOOL=0
MODULE_WEEKLY_ARMOR_VENDOR_BLACK_MARKET=0 MODULE_WEEKLY_ARMOR_VENDOR_BLACK_MARKET=0
MODULE_WORLD_BOSS_RANK=0
MODULE_WOWDATABASEEDITOR=0 MODULE_WOWDATABASEEDITOR=0
MODULE_WOWLAUNCHER_DELPHI=0 MODULE_WOWLAUNCHER_DELPHI=0
MODULE_WOWSIMS_TO_COMMANDS=0 MODULE_WOWSIMS_TO_COMMANDS=0
MODULE_WOW_CLIENT_PATCHER=0
MODULE_WOW_ELUNA_TS_MODULE=0 MODULE_WOW_ELUNA_TS_MODULE=0
MODULE_WOW_SERVER_RELAY=0 MODULE_WOW_SERVER_RELAY=0
MODULE_WOW_STATISTICS=0
MODULE_WRATH_OF_THE_VANILLA=0 MODULE_WRATH_OF_THE_VANILLA=0
MODULE_MOD_BOTS_LOGIN_FIX=0
MODULE_MOD_MATERIAL_BANK=0
MODULE_MOD_PROGRESSION_BLIZZLIKE=0
MODULE_MOD_PYTHON_ENGINE=0
MODULE_WRATH_OF_THE_VANILLA_V2=0
MODULE_DUELS=0
MODULE_WOW_CORE=0

.github/workflows/build-and-publish.yml (new file, 248 lines)

@@ -0,0 +1,248 @@
name: Build and Publish
# This workflow builds AzerothCore with configurable module profiles
# and publishes profile-tagged Docker images to Docker Hub for easy deployment.
#
# Default Profile: RealmMaster (32 modules including playerbots, transmog, solo-lfg, eluna, etc.)
# Available Profiles: RealmMaster, suggested-modules, all-modules, playerbots-only, or custom
# Profile Configuration: See config/module-profiles/
# Documentation: See docs/CICD.md
#
# Published Image Tags:
# - authserver-{profile}-latest (e.g., authserver-realmmaster-latest)
# - authserver-{profile}-YYYYMMDD (e.g., authserver-realmmaster-20260109)
# - authserver-latest (generic tag, defaults to RealmMaster)
# - worldserver-{profile}-latest
# - worldserver-{profile}-YYYYMMDD
# - worldserver-latest (generic tag, defaults to RealmMaster)
on:
schedule:
# Run nightly at 2 AM UTC
- cron: '0 2 * * *'
workflow_dispatch:
inputs:
module_profile:
description: 'Module profile to build (e.g., RealmMaster, suggested-modules, all-modules)'
required: false
type: string
default: 'RealmMaster'
force_rebuild:
description: 'Force rebuild even if no changes detected'
required: false
type: boolean
default: false
jobs:
build:
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Set up Git
run: |
# Configure git for module repository cloning
git config --global user.name "GitHub Actions Bot"
git config --global user.email "github-actions[bot]@users.noreply.github.com"
git --version
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Prepare build environment
env:
TERM: xterm
run: |
# Determine which module profile to use
if [ "${{ github.event_name }}" = "schedule" ]; then
MODULE_PROFILE="RealmMaster"
else
MODULE_PROFILE="${{ github.event.inputs.module_profile }}"
MODULE_PROFILE="${MODULE_PROFILE:-RealmMaster}"
fi
echo "📋 Using module profile: ${MODULE_PROFILE}"
echo "🔧 Running setup.sh to generate proper .env file..."
# Use setup.sh to generate .env with proper configuration
# Benefits of this approach:
# - Uses the same setup logic as local builds (consistency)
# - Handles all path variables correctly (no manual sed patching needed)
# - Automatically determines source variant (standard vs playerbots)
# - Applies module profile and dependencies correctly
# - Centralizes configuration logic in one place (setup.sh)
./setup.sh \
--non-interactive \
--module-config "${MODULE_PROFILE}" \
--deployment-type local \
--force
echo "✅ Environment configuration generated successfully"
# Extract values for GitHub environment
PROJECT_NAME=$(grep '^COMPOSE_PROJECT_NAME=' .env | cut -d'=' -f2 | tr -d '\r' | sed 's/[[:space:]]*#.*//' | sed 's/[[:space:]]*$//')
echo "PROJECT_NAME=${PROJECT_NAME}" >> $GITHUB_ENV
# Store profile name for image tagging (lowercase, replace underscores with hyphens)
PROFILE_TAG=$(echo "${MODULE_PROFILE}" | tr '[:upper:]' '[:lower:]' | tr '_' '-')
echo "PROFILE_TAG=${PROFILE_TAG}" >> $GITHUB_ENV
echo "MODULE_PROFILE=${MODULE_PROFILE}" >> $GITHUB_ENV
# Count enabled modules
MODULE_COUNT=$(grep -c '^MODULE_.*=1' .env || echo "0")
echo "MODULE_COUNT=${MODULE_COUNT}" >> $GITHUB_ENV
# Display configuration summary
echo ""
echo "📊 Build Configuration Summary:"
echo " Project: ${PROJECT_NAME}"
echo " Profile: ${MODULE_PROFILE}"
echo " Profile Tag: ${PROFILE_TAG}"
echo " Modules: ${MODULE_COUNT} enabled"
echo ""
echo "Enabled modules (first 10):"
grep '^MODULE_.*=1' .env | head -10 || true
echo ""
# Show key paths for verification
echo "📂 Key Paths:"
grep '^STORAGE_PATH_LOCAL=' .env || echo " STORAGE_PATH_LOCAL not found"
grep '^MODULES_REBUILD_SOURCE_PATH=' .env || echo " MODULES_REBUILD_SOURCE_PATH not found"
grep '^STACK_SOURCE_VARIANT=' .env || echo " STACK_SOURCE_VARIANT not found"
echo ""
# Verify all Docker images are configured
echo "🐳 Docker Images (that we build and push):"
grep -E '^AC_AUTHSERVER_IMAGE_PLAYERBOTS=' .env || echo " AC_AUTHSERVER_IMAGE_PLAYERBOTS not found"
grep -E '^AC_WORLDSERVER_IMAGE_PLAYERBOTS=' .env || echo " AC_WORLDSERVER_IMAGE_PLAYERBOTS not found"
grep -E '^AC_AUTHSERVER_IMAGE_MODULES=' .env || echo " AC_AUTHSERVER_IMAGE_MODULES not found"
grep -E '^AC_WORLDSERVER_IMAGE_MODULES=' .env || echo " AC_WORLDSERVER_IMAGE_MODULES not found"
- name: Cache Go build cache
uses: actions/cache@v4
with:
path: .gocache
key: ${{ runner.os }}-gocache-${{ hashFiles('**/go.sum') }}
restore-keys: |
${{ runner.os }}-gocache-
- name: Cache local storage
uses: actions/cache@v4
with:
path: local-storage/source
key: ${{ runner.os }}-source-${{ github.sha }}
restore-keys: |
${{ runner.os }}-source-
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.11'
- name: Run build
run: |
# The build.sh script will automatically:
# 1. Generate module state from the enabled modules in .env
# 2. Set up the AzerothCore source repository
# 3. Fetch and clone all enabled module repositories from GitHub
# 4. Stage modules to the source directory
# 5. Compile AzerothCore with all modules
# 6. Tag the resulting Docker images
BUILD_ARGS="--yes"
# Add force flag if manually triggered with force_rebuild
if [ "${{ github.event.inputs.force_rebuild }}" = "true" ]; then
BUILD_ARGS="${BUILD_ARGS} --force"
fi
echo "🔨 Starting build process with ${BUILD_ARGS}..."
echo "This will fetch and build all ${MODULE_COUNT} enabled modules from the ${MODULE_PROFILE} profile"
./build.sh ${BUILD_ARGS}
- name: Tag images for Docker Hub
run: |
DATE_TAG=$(date +%Y%m%d)
# Tag authserver images with profile name
docker tag ${PROJECT_NAME}:authserver-modules-latest \
${{ secrets.DOCKERHUB_USERNAME }}/${PROJECT_NAME}:authserver-${PROFILE_TAG}-latest
docker tag ${PROJECT_NAME}:authserver-modules-latest \
${{ secrets.DOCKERHUB_USERNAME }}/${PROJECT_NAME}:authserver-${PROFILE_TAG}-${DATE_TAG}
# Also tag as generic 'latest' for backward compatibility
docker tag ${PROJECT_NAME}:authserver-modules-latest \
${{ secrets.DOCKERHUB_USERNAME }}/${PROJECT_NAME}:authserver-latest
# Tag worldserver images with profile name
docker tag ${PROJECT_NAME}:worldserver-modules-latest \
${{ secrets.DOCKERHUB_USERNAME }}/${PROJECT_NAME}:worldserver-${PROFILE_TAG}-latest
docker tag ${PROJECT_NAME}:worldserver-modules-latest \
${{ secrets.DOCKERHUB_USERNAME }}/${PROJECT_NAME}:worldserver-${PROFILE_TAG}-${DATE_TAG}
# Also tag as generic 'latest' for backward compatibility
docker tag ${PROJECT_NAME}:worldserver-modules-latest \
${{ secrets.DOCKERHUB_USERNAME }}/${PROJECT_NAME}:worldserver-latest
echo "Tagged images with profile '${PROFILE_TAG}' and date '${DATE_TAG}'"
- name: Push images to Docker Hub
run: |
DATE_TAG=$(date +%Y%m%d)
# Push authserver images (all tags)
docker push ${{ secrets.DOCKERHUB_USERNAME }}/${PROJECT_NAME}:authserver-${PROFILE_TAG}-latest
docker push ${{ secrets.DOCKERHUB_USERNAME }}/${PROJECT_NAME}:authserver-${PROFILE_TAG}-${DATE_TAG}
docker push ${{ secrets.DOCKERHUB_USERNAME }}/${PROJECT_NAME}:authserver-latest
# Push worldserver images (all tags)
docker push ${{ secrets.DOCKERHUB_USERNAME }}/${PROJECT_NAME}:worldserver-${PROFILE_TAG}-latest
docker push ${{ secrets.DOCKERHUB_USERNAME }}/${PROJECT_NAME}:worldserver-${PROFILE_TAG}-${DATE_TAG}
docker push ${{ secrets.DOCKERHUB_USERNAME }}/${PROJECT_NAME}:worldserver-latest
echo "✅ Pushed all image tags to Docker Hub"
- name: Build summary
run: |
DATE_TAG=$(date +%Y%m%d)
echo "## Build Summary" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "✅ Build completed successfully" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "### Configuration" >> $GITHUB_STEP_SUMMARY
echo "- **Module Profile**: ${MODULE_PROFILE}" >> $GITHUB_STEP_SUMMARY
echo "- **Enabled Modules**: ${MODULE_COUNT}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "<details>" >> $GITHUB_STEP_SUMMARY
echo "<summary>View enabled modules</summary>" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
grep '^MODULE_.*=1' .env | sed 's/=1//' || true >> $GITHUB_STEP_SUMMARY
echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
echo "</details>" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "### Published Images" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "#### Profile-Specific Tags" >> $GITHUB_STEP_SUMMARY
echo "- \`${{ secrets.DOCKERHUB_USERNAME }}/${PROJECT_NAME}:authserver-${PROFILE_TAG}-latest\`" >> $GITHUB_STEP_SUMMARY
echo "- \`${{ secrets.DOCKERHUB_USERNAME }}/${PROJECT_NAME}:authserver-${PROFILE_TAG}-${DATE_TAG}\`" >> $GITHUB_STEP_SUMMARY
echo "- \`${{ secrets.DOCKERHUB_USERNAME }}/${PROJECT_NAME}:worldserver-${PROFILE_TAG}-latest\`" >> $GITHUB_STEP_SUMMARY
echo "- \`${{ secrets.DOCKERHUB_USERNAME }}/${PROJECT_NAME}:worldserver-${PROFILE_TAG}-${DATE_TAG}\`" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "#### Generic Tags (backward compatibility)" >> $GITHUB_STEP_SUMMARY
echo "- \`${{ secrets.DOCKERHUB_USERNAME }}/${PROJECT_NAME}:authserver-latest\`" >> $GITHUB_STEP_SUMMARY
echo "- \`${{ secrets.DOCKERHUB_USERNAME }}/${PROJECT_NAME}:worldserver-latest\`" >> $GITHUB_STEP_SUMMARY

.github/workflows/create-release.yml (new file, 246 lines)

@@ -0,0 +1,246 @@
name: Create Release
on:
workflow_dispatch:
inputs:
version:
description: 'Release version (e.g., v1.0.0)'
required: true
type: string
profile:
description: 'Module profile for this release'
required: false
type: string
default: 'RealmMaster'
prerelease:
description: 'Mark as pre-release'
required: false
type: boolean
default: false
jobs:
create-release:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Prepare release variables
run: |
VERSION="${{ github.event.inputs.version }}"
PROFILE="${{ github.event.inputs.profile }}"
PROFILE_TAG=$(echo "${PROFILE}" | tr '[:upper:]' '[:lower:]' | tr '_' '-')
echo "VERSION=${VERSION}" >> $GITHUB_ENV
echo "PROFILE=${PROFILE}" >> $GITHUB_ENV
echo "PROFILE_TAG=${PROFILE_TAG}" >> $GITHUB_ENV
# Get build date from Docker Hub image (or use current date)
BUILD_DATE=$(date +%Y%m%d)
echo "BUILD_DATE=${BUILD_DATE}" >> $GITHUB_ENV
# Get AzerothCore commit from local-storage if available
if [ -d "local-storage/source/azerothcore-playerbots" ]; then
ACORE_COMMIT=$(cd local-storage/source/azerothcore-playerbots && git rev-parse --short HEAD)
else
ACORE_COMMIT="unknown"
fi
echo "ACORE_COMMIT=${ACORE_COMMIT}" >> $GITHUB_ENV
- name: Read module list from profile
run: |
PROFILE_FILE="config/module-profiles/${PROFILE}.json"
if [ ! -f "$PROFILE_FILE" ]; then
echo "ERROR: Profile file not found: $PROFILE_FILE"
exit 1
fi
# Extract module count
MODULE_COUNT=$(python3 -c "import json; data=json.load(open('$PROFILE_FILE')); print(len(data.get('modules', [])))")
echo "MODULE_COUNT=${MODULE_COUNT}" >> $GITHUB_ENV
# Extract modules for release notes
python3 -c "import json; data=json.load(open('$PROFILE_FILE')); print('\n'.join(['- ' + m for m in data.get('modules', [])]))" > modules.txt
- name: Create deployment package
run: |
PACKAGE_NAME="azerothcore-realmmaster-${VERSION}-${PROFILE_TAG}"
mkdir -p "${PACKAGE_NAME}"
# Copy essential deployment files
cp .env.prebuilt "${PACKAGE_NAME}/.env.prebuilt"
cp docker-compose.yml "${PACKAGE_NAME}/docker-compose.yml"
cp deploy.sh "${PACKAGE_NAME}/deploy.sh"
cp status.sh "${PACKAGE_NAME}/status.sh"
cp cleanup.sh "${PACKAGE_NAME}/cleanup.sh"
cp README.md "${PACKAGE_NAME}/README.md"
# Copy scripts directory
cp -r scripts "${PACKAGE_NAME}/scripts"
# Copy config directory
cp -r config "${PACKAGE_NAME}/config"
# Copy docs directory
cp -r docs "${PACKAGE_NAME}/docs"
# Create a quick start guide specific to this release
{
echo "# Quick Start - AzerothCore RealmMaster ${VERSION}"
echo ""
echo "## Module Profile: ${PROFILE}"
echo "${MODULE_COUNT} modules included"
echo ""
echo "## Docker Images"
echo "This release uses the following pre-built images:"
echo "- \`\${DOCKERHUB_USERNAME}/azerothcore-realmmaster:authserver-${PROFILE_TAG}-${BUILD_DATE}\`"
echo "- \`\${DOCKERHUB_USERNAME}/azerothcore-realmmaster:worldserver-${PROFILE_TAG}-${BUILD_DATE}\`"
echo ""
echo "Or use the latest tags:"
echo "- \`\${DOCKERHUB_USERNAME}/azerothcore-realmmaster:authserver-${PROFILE_TAG}-latest\`"
echo "- \`\${DOCKERHUB_USERNAME}/azerothcore-realmmaster:worldserver-${PROFILE_TAG}-latest\`"
echo ""
echo "## Installation"
echo ""
echo "1. **Edit .env.prebuilt**:"
echo " \`\`\`bash"
echo " nano .env.prebuilt"
echo " # Set: DOCKERHUB_USERNAME=uprightbass360"
echo " \`\`\`"
echo ""
echo "2. **Rename to .env**:"
echo " \`\`\`bash"
echo " mv .env.prebuilt .env"
echo " \`\`\`"
echo ""
echo "3. **Deploy**:"
echo " \`\`\`bash"
echo " chmod +x deploy.sh status.sh cleanup.sh"
echo " ./deploy.sh"
echo " \`\`\`"
echo ""
echo "4. **Check status**:"
echo " \`\`\`bash"
echo " ./status.sh"
echo " \`\`\`"
echo ""
echo "## Documentation"
echo "- [Pre-Built Images Guide](docs/PREBUILT_IMAGES.md)"
echo "- [Getting Started](docs/GETTING_STARTED.md)"
echo "- [Troubleshooting](docs/TROUBLESHOOTING.md)"
echo ""
echo "## Support"
echo "- GitHub Issues: https://github.com/uprightbass360/AzerothCore-RealmMaster/issues"
echo "- AzerothCore Discord: https://discord.gg/gkt4y2x"
} > "${PACKAGE_NAME}/QUICKSTART.md"
# Make scripts executable
chmod +x "${PACKAGE_NAME}/deploy.sh"
chmod +x "${PACKAGE_NAME}/status.sh"
chmod +x "${PACKAGE_NAME}/cleanup.sh"
# Create zip archive
zip -r "${PACKAGE_NAME}.zip" "${PACKAGE_NAME}"
echo "PACKAGE_NAME=${PACKAGE_NAME}" >> $GITHUB_ENV
- name: Generate release notes
run: |
{
echo "# AzerothCore RealmMaster ${VERSION} - ${PROFILE} Profile"
echo ""
echo "## 🎯 Module Profile: ${PROFILE}"
echo "${MODULE_COUNT} modules included"
echo ""
echo "## 📦 Docker Images"
echo ""
echo "Pull these pre-built images from Docker Hub:"
echo ""
echo "**Date-specific (recommended for production)**:"
echo "\`\`\`bash"
echo "docker pull \${DOCKERHUB_USERNAME}/azerothcore-realmmaster:authserver-${PROFILE_TAG}-${BUILD_DATE}"
echo "docker pull \${DOCKERHUB_USERNAME}/azerothcore-realmmaster:worldserver-${PROFILE_TAG}-${BUILD_DATE}"
echo "\`\`\`"
echo ""
echo "**Latest (auto-updated nightly)**:"
echo "\`\`\`bash"
echo "docker pull \${DOCKERHUB_USERNAME}/azerothcore-realmmaster:authserver-${PROFILE_TAG}-latest"
echo "docker pull \${DOCKERHUB_USERNAME}/azerothcore-realmmaster:worldserver-${PROFILE_TAG}-latest"
echo "\`\`\`"
echo ""
echo "## 🚀 Quick Start"
echo ""
echo "\`\`\`bash"
echo "# Download and extract"
echo "wget https://github.com/uprightbass360/AzerothCore-RealmMaster/releases/download/${VERSION}/${PACKAGE_NAME}.zip"
echo "unzip ${PACKAGE_NAME}.zip"
echo "cd ${PACKAGE_NAME}"
echo ""
echo "# Configure Docker Hub username"
echo "nano .env.prebuilt"
echo "# Set: DOCKERHUB_USERNAME=uprightbass360"
echo ""
echo "# Deploy"
echo "mv .env.prebuilt .env"
echo "./deploy.sh"
echo "\`\`\`"
echo ""
echo "Full documentation in \`docs/PREBUILT_IMAGES.md\`"
echo ""
echo "## 📋 Included Modules"
echo ""
cat modules.txt
echo ""
echo "## 📊 Build Information"
echo ""
echo "- **Built**: ${BUILD_DATE}"
echo "- **AzerothCore Commit**: ${ACORE_COMMIT}"
echo "- **Source Variant**: playerbots (for MODULE_PLAYERBOTS support)"
echo "- **Profile**: ${PROFILE}"
echo "- **Module Count**: ${MODULE_COUNT}"
echo ""
echo "## 📖 Documentation"
echo ""
echo "Full documentation available in the \`docs/\` directory of the release package:"
echo "- [Pre-Built Images Guide](https://github.com/uprightbass360/AzerothCore-RealmMaster/blob/${VERSION}/docs/PREBUILT_IMAGES.md)"
echo "- [Getting Started Guide](https://github.com/uprightbass360/AzerothCore-RealmMaster/blob/${VERSION}/docs/GETTING_STARTED.md)"
echo "- [Module Catalog](https://github.com/uprightbass360/AzerothCore-RealmMaster/blob/${VERSION}/docs/MODULES.md)"
echo "- [Troubleshooting](https://github.com/uprightbass360/AzerothCore-RealmMaster/blob/${VERSION}/docs/TROUBLESHOOTING.md)"
echo ""
echo "## 🐛 Known Issues"
echo ""
echo "None at this time. Report issues at: https://github.com/uprightbass360/AzerothCore-RealmMaster/issues"
echo ""
echo "## 💬 Support"
echo ""
echo "- **GitHub Issues**: https://github.com/uprightbass360/AzerothCore-RealmMaster/issues"
echo "- **AzerothCore Discord**: https://discord.gg/gkt4y2x"
echo "- **Documentation**: https://github.com/uprightbass360/AzerothCore-RealmMaster/tree/${VERSION}/docs"
} > release_notes.md
- name: Create GitHub Release
uses: softprops/action-gh-release@v1
with:
tag_name: ${{ env.VERSION }}
name: "RealmMaster ${{ env.VERSION }} - ${{ env.PROFILE }} Profile"
body_path: release_notes.md
files: |
${{ env.PACKAGE_NAME }}.zip
prerelease: ${{ github.event.inputs.prerelease }}
draft: false
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Release summary
run: |
echo "## Release Created Successfully! 🎉" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Version**: ${{ env.VERSION }}" >> $GITHUB_STEP_SUMMARY
echo "**Profile**: ${{ env.PROFILE }}" >> $GITHUB_STEP_SUMMARY
echo "**Modules**: ${{ env.MODULE_COUNT }}" >> $GITHUB_STEP_SUMMARY
echo "**Package**: ${{ env.PACKAGE_NAME }}.zip" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "📦 Release available at:" >> $GITHUB_STEP_SUMMARY
echo "https://github.com/${{ github.repository }}/releases/tag/${{ env.VERSION }}" >> $GITHUB_STEP_SUMMARY


@@ -17,13 +17,31 @@ jobs:
with:
python-version: '3.11'
- name: Configure git
run: |
git config --global user.name 'github-actions[bot]'
git config --global user.email 'github-actions[bot]@users.noreply.github.com'
- name: Update manifest from GitHub topics
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
python3 scripts/python/update_module_manifest.py --log
- name: Check for changes
id: changes
run: |
if git diff --quiet; then
echo "changed=false" >> $GITHUB_OUTPUT
echo "No changes detected in manifest or template files"
else
echo "changed=true" >> $GITHUB_OUTPUT
echo "Changes detected:"
git diff --name-only
fi
- name: Create Pull Request with changes
if: steps.changes.outputs.changed == 'true'
uses: peter-evans/create-pull-request@v5
with:
commit-message: 'chore: sync module manifest'

.gitignore

@@ -1,23 +1,27 @@
-database-import/*.sql
-database-import/*.sql.gz
-database-import/*/
-database-import/ImportBackup*/
-source/*
-local-data-tools/
-changelogs/
+# ===================================
+# Environment & Configuration
+# ===================================
+.env
+.claude/
+.mcp*/
+
+# ===================================
+# Storage & Data Directories
+# ===================================
 storage/
 local-storage/
-.claude/
-images/
-node_modules/
-.mcp*/
-scripts/__pycache__/*
-scripts/python/__pycache__/*
-.env
-package-lock.json
-package.json
-todo.md
+db_*/
+
+# ===================================
+# Build Artifacts & Cache
+# ===================================
 .gocache/
-.module-ledger/
-deploy.log
-statusdash
+statusdash
+scripts/__pycache__/*
+scripts/bash/__pycache__/*
+scripts/python/__pycache__/*
+
+# ===================================
+# Logs & Runtime State
+# ===================================
+deploy.log


@@ -1,87 +0,0 @@
# Changelog
## [2025-11-09] - Recent Changes
### ✨ Features
#### Backup System Enhancements
- **Manual Backup Support**: Added `manual-backup.sh` script (92 lines) enabling on-demand database backups through the ac-backup container
- **Backup Permission Fixes**: Resolved Docker volume permission issues with backup operations
- **Container User Configuration**: Backup operations now run as proper container user to avoid permission conflicts
#### Remote Deployment
- **Auto Deploy Option**: Added remote auto-deployment functionality to `deploy.sh` (36 additional lines) for automated server provisioning
#### Configuration Management System
- **Database/Config Import**: Major new feature with 1,405+ lines of code across 15 files
- Added `apply-config.py` (323 lines) for dynamic server configuration
- Created `configure-server.sh` (162 lines) for server setup automation
- Implemented `import-database-files.sh` (68 lines) for database initialization
- Added `parse-config-presets.py` (92 lines) for configuration templating
- **Configuration Presets**: 5 new server preset configurations
- `blizzlike.conf` - Authentic Blizzard-like experience
- `casual-pve.conf` - Relaxed PvE gameplay
- `fast-leveling.conf` - Accelerated character progression
- `hardcore-pvp.conf` - Competitive PvP settings
- `none.conf` - Minimal configuration baseline
- **Dynamic Server Overrides**: `server-overrides.conf` (134 lines) for customizable server parameters
- **Comprehensive Config Documentation**: `CONFIG_MANAGEMENT.md` (279 lines) detailing the entire configuration system
#### Infrastructure Improvements
- **MySQL Exposure Toggle**: Optional MySQL port exposure for external database access
- **Client Data Management**: Automatic client data detection, download, and binding with version detection
- **Dynamic Docker Overrides**: Flexible compose override system for modular container configurations
- **Module Profile System**: Structured module management with preset profiles
### 🏗️ Refactoring
#### Script Organization
- **Directory Restructure**: Reorganized all scripts into `scripts/bash/` and `scripts/python/` directories (40 files moved/modified)
- **Project Naming**: Added centralized project name management with `project_name.sh`
- **Module Manifest Rename**: Moved `modules.json``module-manifest.json` for clarity
### 🐛 Bug Fixes
#### Container Improvements
- **Client Data Container**: Enhanced with 7zip support, root access during extraction, and ownership fixes
- **Permission Resolution**: Fixed file ownership issues in client data extraction process
- **Path Updates**: Corrected deployment paths and script references after reorganization
### 📚 Documentation
#### Major Documentation Overhaul
- **Modular Documentation**: Split massive README into focused documents (1,500+ lines reorganized)
- `docs/GETTING_STARTED.md` (467 lines) - Setup and initial configuration
- `docs/MODULES.md` (264 lines) - Module management and customization
- `docs/SCRIPTS.md` (404 lines) - Script reference and automation
- `docs/ADVANCED.md` (207 lines) - Advanced configuration topics
- `docs/TROUBLESHOOTING.md` (127 lines) - Common issues and solutions
- **README Streamlining**: Reduced main README from 1,200+ to focused overview
- **Script Documentation**: Updated script references and usage examples throughout
### 🔧 Technical Changes
#### Development Experience
- **Setup Enhancements**: Improved `setup.sh` with better error handling and configuration options (66 lines added)
- **Status Monitoring**: Enhanced `status.sh` with better container and service monitoring
- **Build Process**: Updated build scripts with new directory structure and module handling
- **Cleanup Operations**: Improved cleanup scripts with proper path handling
#### DevOps & Deployment
- **Remote Cleanup**: Enhanced remote server cleanup and temporary file management
- **Network Binding**: Improved container networking and port management
- **Import Folder**: Added dedicated import directory structure
- **Development Onboarding**: Streamlined developer setup process
---
### Migration Notes
- Scripts have moved from `scripts/` to `scripts/bash/` and `scripts/python/`
- Module configuration is now in `config/module-manifest.json`
- New environment variables added for MySQL exposure and client data management
- Configuration presets are available in `config/presets/`
### Breaking Changes
- Script paths have changed due to reorganization
- Module manifest file has been renamed
- Some environment variables have been added/modified

View File

@@ -11,11 +11,13 @@ A complete containerized deployment of AzerothCore WoW 3.3.5a (Wrath of the Lich
- [Quick Start](#quick-start)
- [What You Get](#what-you-get)
- [Getting Started](#getting-started) → **[docs/GETTING_STARTED.md](docs/GETTING_STARTED.md)**
- [Using Pre-Built Images](#using-pre-built-images-no-build-required) → **[docs/PREBUILT_IMAGES.md](docs/PREBUILT_IMAGES.md)**
- [Complete Module Catalog](#complete-module-catalog) → **[docs/MODULES.md](docs/MODULES.md)**
- [Management & Operations](#management--operations) → **[docs/GETTING_STARTED.md](docs/GETTING_STARTED.md)**
- [Advanced Configuration](#advanced-configuration) → **[docs/ADVANCED.md](docs/ADVANCED.md)**
- [Custom NPCs Guide](#custom-npcs-guide) → **[docs/NPCS.md](docs/NPCS.md)**
- [Script Reference](#script-reference) → **[docs/SCRIPTS.md](docs/SCRIPTS.md)**
- [CI/CD & Pre-Built Images](#cicd--pre-built-images) → **[docs/CICD.md](docs/CICD.md)**
- [Troubleshooting](#troubleshooting) → **[docs/TROUBLESHOOTING.md](docs/TROUBLESHOOTING.md)**
- [Credits & Next Steps](#credits--next-steps)
@@ -26,7 +28,7 @@ A complete containerized deployment of AzerothCore WoW 3.3.5a (Wrath of the Lich
### Reccomendations ### Reccomendations
- **Docker** with Docker Compose 2 - **Docker** with Docker Compose 2
- **16GB+ RAM** and **64GB+ storage** - **16GB+ RAM** and **64GB+ storage**
- **Linux/macOS/WSL2** Fully tested with Ubuntu 24.04 and Debian 12 - **Linux/macOS/WSL2** Fully tested with Ubuntu 24.04 - Debian 12 might work but permissions can require manual intervention
### Three Simple Steps ### Three Simple Steps
@@ -45,6 +47,25 @@ cd AzerothCore-RealmMaster
**First deployment takes 30-60 minutes** for database setup and client data download. Subsequent starts are much faster.
### Using Pre-Built Images (No Build Required!)
Skip the build process and deploy with pre-built Docker images:
```bash
# 1. Clone the repository
git clone https://github.com/uprightbass360/AzerothCore-RealmMaster.git
cd AzerothCore-RealmMaster
# 2. Use pre-built configuration
cp .env.prebuilt .env
# 3. Edit .env and set DOCKERHUB_USERNAME
# 4. Deploy
./deploy.sh
```
Pre-built images include the **RealmMaster profile** (32 modules) and are automatically built nightly. See **[docs/PREBUILT_IMAGES.md](docs/PREBUILT_IMAGES.md)** for details.
See [Getting Started](#getting-started) for detailed walkthrough.
## What You Get
@@ -130,6 +151,13 @@ For diagnostic procedures, common issues, and backup system documentation, see *
This project builds upon:
- **[AzerothCore](https://github.com/azerothcore/azerothcore-wotlk)** - Core server application
- **[AzerothCore Module Community](https://github.com/azerothcore)** - Enhanced gameplay modules
- **[acore-docker](https://github.com/azerothcore/acore-docker)** - Inspiration for containerized deployment
- **[mod-playerbots](https://github.com/mod-playerbots/azerothcore-wotlk)** - Advanced playerbot functionality
- **All module creators** - Making amazing things every day
### Community & Support
- **[AzerothCore Discord](https://discord.gg/gkt4y2x)** - Join the community for support and discussions
- **[GitHub Issues](https://github.com/uprightbass360/AzerothCore-RealmMaster/issues)** - Report build or deployment issues here
#### Key Features
-**Fully Automated Setup** - Interactive configuration and deployment -**Fully Automated Setup** - Interactive configuration and deployment

View File

@@ -38,6 +38,7 @@ Build AzerothCore with custom modules and create deployment-ready images.
Options:
--yes, -y Auto-confirm all prompts
--force Force rebuild even if no changes detected
--force-update Force update source repository to latest commits
--source-path PATH Custom source repository path
--skip-source-setup Skip automatic source repository setup
-h, --help Show this help
@@ -53,6 +54,7 @@ Examples:
./build.sh Interactive build
./build.sh --yes Auto-confirm build
./build.sh --force Force rebuild regardless of state
./build.sh --force-update Update source to latest and build
EOF
}
@@ -60,6 +62,7 @@ while [[ $# -gt 0 ]]; do
case "$1" in case "$1" in
--yes|-y) ASSUME_YES=1; shift;; --yes|-y) ASSUME_YES=1; shift;;
--force) FORCE_REBUILD=1; shift;; --force) FORCE_REBUILD=1; shift;;
--force-update) FORCE_UPDATE=1; shift;;
--source-path) CUSTOM_SOURCE_PATH="$2"; shift 2;; --source-path) CUSTOM_SOURCE_PATH="$2"; shift 2;;
--skip-source-setup) SKIP_SOURCE_SETUP=1; shift;; --skip-source-setup) SKIP_SOURCE_SETUP=1; shift;;
-h|--help) usage; exit 0;; -h|--help) usage; exit 0;;
@@ -240,6 +243,13 @@ ensure_source_repo(){
src_path="${src_path//\/.\//\/}" src_path="${src_path//\/.\//\/}"
if [ -d "$src_path/.git" ]; then if [ -d "$src_path/.git" ]; then
if [ "${FORCE_UPDATE:-0}" = "1" ]; then
info "Force update requested - updating source repository to latest" >&2
if ! (cd "$ROOT_DIR" && ./scripts/bash/setup-source.sh) >&2; then
err "Failed to update source repository" >&2
exit 1
fi
fi
echo "$src_path" echo "$src_path"
return return
fi fi
@@ -540,6 +550,10 @@ stage_modules(){
rm -f "$staging_modules_dir/.modules_state" "$staging_modules_dir/.requires_rebuild" 2>/dev/null || true rm -f "$staging_modules_dir/.modules_state" "$staging_modules_dir/.requires_rebuild" 2>/dev/null || true
fi fi
# Export environment variables needed by module hooks
export STACK_SOURCE_VARIANT="$(read_env STACK_SOURCE_VARIANT "core")"
export MODULES_REBUILD_SOURCE_PATH="$(read_env MODULES_REBUILD_SOURCE_PATH "")"
if ! (cd "$local_modules_dir" && bash "$ROOT_DIR/scripts/bash/manage-modules.sh"); then if ! (cd "$local_modules_dir" && bash "$ROOT_DIR/scripts/bash/manage-modules.sh"); then
err "Module staging failed; aborting build" err "Module staging failed; aborting build"
return 1 return 1

View File

@@ -99,7 +99,36 @@ done
# Get last build time from container metadata # Get last build time from container metadata
get_last_build_time() { get_last_build_time() {
local containers=("ac-worldserver" "ac-authserver") local containers=("ac-worldserver" "ac-authserver")
local images=("azerothcore-stack:worldserver-playerbots" "azerothcore-stack:authserver-playerbots") local images=()
# Require COMPOSE_PROJECT_NAME to be set
if [[ -z "${COMPOSE_PROJECT_NAME:-}" ]]; then
warn "COMPOSE_PROJECT_NAME not set in environment"
return 1
fi
# Use actual image names from environment
# Detect variant to check appropriate images
if [[ "${STACK_IMAGE_MODE:-standard}" == "playerbots" ]] || [[ "${MODULE_PLAYERBOTS:-0}" == "1" ]] || [[ "${PLAYERBOT_ENABLED:-0}" == "1" ]] || [[ "${STACK_SOURCE_VARIANT:-}" == "playerbots" ]]; then
if [[ -z "${AC_WORLDSERVER_IMAGE_PLAYERBOTS:-}" ]] || [[ -z "${AC_AUTHSERVER_IMAGE_PLAYERBOTS:-}" ]]; then
warn "Playerbots mode detected but AC_WORLDSERVER_IMAGE_PLAYERBOTS or AC_AUTHSERVER_IMAGE_PLAYERBOTS not set"
return 1
fi
images=(
"${AC_WORLDSERVER_IMAGE_PLAYERBOTS}"
"${AC_AUTHSERVER_IMAGE_PLAYERBOTS}"
)
else
if [[ -z "${AC_WORLDSERVER_IMAGE:-}" ]] || [[ -z "${AC_AUTHSERVER_IMAGE:-}" ]]; then
warn "Standard mode detected but AC_WORLDSERVER_IMAGE or AC_AUTHSERVER_IMAGE not set"
return 1
fi
images=(
"${AC_WORLDSERVER_IMAGE}"
"${AC_AUTHSERVER_IMAGE}"
)
fi
local latest_date="" local latest_date=""
# Try to get build timestamp from containers and images # Try to get build timestamp from containers and images
@@ -143,7 +172,7 @@ if [[ -n "$SINCE_DATE" ]]; then
DATE_DESC="since $SINCE_DATE" DATE_DESC="since $SINCE_DATE"
else else
# Try to use last build time as default # Try to use last build time as default
LAST_BUILD_DATE=$(get_last_build_time) LAST_BUILD_DATE=$(get_last_build_time 2>/dev/null) || LAST_BUILD_DATE=""
if [[ -n "$LAST_BUILD_DATE" ]]; then if [[ -n "$LAST_BUILD_DATE" ]]; then
SINCE_OPTION="--since=$LAST_BUILD_DATE" SINCE_OPTION="--since=$LAST_BUILD_DATE"
@@ -194,11 +223,17 @@ detect_source_config() {
$VERBOSE && log "Switched to playerbots variant" >&2 $VERBOSE && log "Switched to playerbots variant" >&2
fi fi
# Repository URLs from environment or defaults # Repository URLs from environment (required)
local standard_repo="${ACORE_REPO_STANDARD:-https://github.com/azerothcore/azerothcore-wotlk.git}" local standard_repo="${ACORE_REPO_STANDARD}"
local standard_branch="${ACORE_BRANCH_STANDARD:-master}" local standard_branch="${ACORE_BRANCH_STANDARD}"
local playerbots_repo="${ACORE_REPO_PLAYERBOTS:-https://github.com/mod-playerbots/azerothcore-wotlk.git}" local playerbots_repo="${ACORE_REPO_PLAYERBOTS}"
local playerbots_branch="${ACORE_BRANCH_PLAYERBOTS:-Playerbot}" local playerbots_branch="${ACORE_BRANCH_PLAYERBOTS}"
if [[ -z "$standard_repo" ]] || [[ -z "$standard_branch" ]] || [[ -z "$playerbots_repo" ]] || [[ -z "$playerbots_branch" ]]; then
warn "Repository configuration missing from environment"
warn "Required: ACORE_REPO_STANDARD, ACORE_BRANCH_STANDARD, ACORE_REPO_PLAYERBOTS, ACORE_BRANCH_PLAYERBOTS"
return 1
fi
if [[ "$variant" == "playerbots" ]]; then if [[ "$variant" == "playerbots" ]]; then
echo "$playerbots_repo|$playerbots_branch|$LOCAL_STORAGE_ROOT/source/azerothcore-playerbots" echo "$playerbots_repo|$playerbots_branch|$LOCAL_STORAGE_ROOT/source/azerothcore-playerbots"

View File

@@ -146,8 +146,6 @@ sanitize_project_name(){
project_name::sanitize "$1" project_name::sanitize "$1"
} }
PROJECT_IMAGE_PREFIX="$(sanitize_project_name "${COMPOSE_PROJECT_NAME:-$DEFAULT_PROJECT_NAME}")"
remove_storage_dir(){ remove_storage_dir(){
local path="$1" local path="$1"
if [ -d "$path" ]; then if [ -d "$path" ]; then
@@ -223,8 +221,7 @@ nuclear_cleanup() {
# Remove project images (server/tool images typical to this project) # Remove project images (server/tool images typical to this project)
execute_command "Remove acore images" "docker images --format '{{.Repository}}:{{.Tag}}' | grep -E '^acore/' | xargs -r docker rmi" execute_command "Remove acore images" "docker images --format '{{.Repository}}:{{.Tag}}' | grep -E '^acore/' | xargs -r docker rmi"
execute_command "Remove local project images" "docker images --format '{{.Repository}}:{{.Tag}}' | grep -E '^${PROJECT_IMAGE_PREFIX}:' | xargs -r docker rmi" execute_command "Remove project-specific images" "docker images --format '{{.Repository}}:{{.Tag}}' | grep -E \"^${PROJECT_NAME}:\" | xargs -r docker rmi"
execute_command "Remove legacy playerbots images" "docker images --format '{{.Repository}}:{{.Tag}}' | grep -E '^uprightbass360/azerothcore-wotlk-playerbots' | xargs -r docker rmi"
execute_command "Remove tool images" "docker images --format '{{.Repository}}:{{.Tag}}' | grep -E 'phpmyadmin|uprightbass360/keira3' | xargs -r docker rmi" execute_command "Remove tool images" "docker images --format '{{.Repository}}:{{.Tag}}' | grep -E 'phpmyadmin|uprightbass360/keira3' | xargs -r docker rmi"
# Storage cleanup (preserve backups if requested) # Storage cleanup (preserve backups if requested)

View File

@@ -491,7 +491,7 @@
"block_reason": "Runtime error: SQL error: MODULE_mod-black-market_creature.sql references removed 'StatsCount' column", "block_reason": "Runtime error: SQL error: MODULE_mod-black-market_creature.sql references removed 'StatsCount' column",
"order": 5000, "order": 5000,
"config_cleanup": [], "config_cleanup": [],
"notes": "Disabled due to runtime error: SQL error: MODULE_mod-black-market_creature.sql references removed 'StatsCount' column", "notes": "Disabled due to runtime error: SQL error: MODULE_mod-black-market_creature.sql references removed 'StatsCount' column \nDiscovered via GitHub topic 'azerothcore-module'",
"last_modified": "2025-06-26T14:23:47Z" "last_modified": "2025-06-26T14:23:47Z"
}, },
{ {
@@ -1490,7 +1490,7 @@
"description": "Module for WoW 3.3.5a (AzerothCore \u2013 Playerbots). Tested on Ubuntu.", "description": "Module for WoW 3.3.5a (AzerothCore \u2013 Playerbots). Tested on Ubuntu.",
"type": "cpp", "type": "cpp",
"category": "database", "category": "database",
"notes": "Disabled due to runtime error: MODULE_mod-guild-village_001_creature_template.sql tries to insert duplicate creature ID 987400 (ERROR 1062)", "notes": "Disabled due to runtime error: MODULE_mod-guild-village_001_creature_template.sql tries to insert duplicate creature ID 987400 (ERROR 1062) \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -1581,7 +1581,7 @@
"description": "Module for WoW 3.3.5a (AzerothCore \u2013 Playerbots). Tested on Ubuntu.", "description": "Module for WoW 3.3.5a (AzerothCore \u2013 Playerbots). Tested on Ubuntu.",
"type": "cpp", "type": "cpp",
"category": "database", "category": "database",
"notes": "Disabled due to runtime error: MODULE_mod-instance-tools_Creature.sql tries to insert duplicate creature ID 987456-0 (ERROR 1062)", "notes": "Disabled due to runtime error: MODULE_mod-instance-tools_Creature.sql tries to insert duplicate creature ID 987456-0 (ERROR 1062) \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -1809,7 +1809,7 @@
"description": "Hardcore trial mod for groups of 1-5", "description": "Hardcore trial mod for groups of 1-5",
"type": "cpp", "type": "cpp",
"category": "progression", "category": "progression",
"notes": "DISABLED: no member named 'isEmpty' in 'MapRefMgr'", "notes": "DISABLED: no member named 'isEmpty' in 'MapRefMgr' \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -2005,7 +2005,7 @@
"description": "AoE loot module for AzerothCore built from the ground up without loot loss. (No Loot Merging).", "description": "AoE loot module for AzerothCore built from the ground up without loot loss. (No Loot Merging).",
"type": "cpp", "type": "cpp",
"category": "quality-of-life", "category": "quality-of-life",
"notes": "DISABLED: Naming conflict with Item class", "notes": "DISABLED: Naming conflict with Item class \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -2142,7 +2142,7 @@
"description": "Adds Mist of Pandaria's Brawler's Guild to AzerothCore.", "description": "Adds Mist of Pandaria's Brawler's Guild to AzerothCore.",
"type": "cpp", "type": "cpp",
"category": "minigame", "category": "minigame",
"notes": "DISABLED: no matching member function for call to 'DelayEvents'", "notes": "DISABLED: no matching member function for call to 'DelayEvents' \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -2188,7 +2188,7 @@
"description": "Mercado negro", "description": "Mercado negro",
"type": "cpp", "type": "cpp",
"category": "database", "category": "database",
"notes": "DISABLED: SQL schema mismatch - StatsCount column doesn't exist", "notes": "DISABLED: SQL schema mismatch - StatsCount column doesn't exist \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -2835,7 +2835,7 @@
"description": "Prestige level system mod for Azerothcore", "description": "Prestige level system mod for Azerothcore",
"type": "cpp", "type": "cpp",
"category": "progression", "category": "progression",
"notes": "DISABLED: 'OnLogin' marked 'override' but does not override", "notes": "DISABLED: 'OnLogin' marked 'override' but does not override \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -2851,7 +2851,7 @@
"description": "Adds extra difficulty modes, heavily inspired by mod-challenge-modes.", "description": "Adds extra difficulty modes, heavily inspired by mod-challenge-modes.",
"type": "cpp", "type": "cpp",
"category": "progression", "category": "progression",
"notes": "DISABLED: only virtual member functions can be marked 'override'", "notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -2897,7 +2897,7 @@
"description": "This module aims to make mounts, companions and heirlooms shared across all characters of an account", "description": "This module aims to make mounts, companions and heirlooms shared across all characters of an account",
"type": "cpp", "type": "cpp",
"category": "account-wide", "category": "account-wide",
"notes": "DISABLED: only virtual member functions can be marked 'override'", "notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3141,7 +3141,7 @@
"description": "An Azeroth Core module to allow the display of the Breaking News section on the character select screen.", "description": "An Azeroth Core module to allow the display of the Breaking News section on the character select screen.",
"type": "cpp", "type": "cpp",
"category": "social", "category": "social",
"notes": "DISABLED: no member named 'StringFormatFmt' in namespace 'Acore'", "notes": "DISABLED: no member named 'StringFormatFmt' in namespace 'Acore' \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3172,7 +3172,7 @@
"description": "This WoW-Azerothcore-Mod allows to change spawntimes based on a userdefined or dynamically calculated playerbased factor", "description": "This WoW-Azerothcore-Mod allows to change spawntimes based on a userdefined or dynamically calculated playerbased factor",
"type": "cpp", "type": "cpp",
"category": "progression", "category": "progression",
"notes": "DISABLED: only virtual member functions can be marked 'override'", "notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3188,7 +3188,7 @@
"description": "This mod allows noclip with a command noclip. on / off", "description": "This mod allows noclip with a command noclip. on / off",
"type": "cpp", "type": "cpp",
"category": "admin", "category": "admin",
"notes": "DISABLED: only virtual member functions can be marked 'override'", "notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3249,7 +3249,7 @@
"description": "AzerothCore port (as a Module) of Rochet2's Objscale", "description": "AzerothCore port (as a Module) of Rochet2's Objscale",
"type": "cpp", "type": "cpp",
"category": "progression", "category": "progression",
"notes": "DISABLED: only virtual member functions can be marked 'override'", "notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3311,7 +3311,7 @@
"description": "An AzerothCore module that recycles unwanted items to the auction house.", "description": "An AzerothCore module that recycles unwanted items to the auction house.",
"type": "cpp", "type": "cpp",
"category": "economy", "category": "economy",
"notes": "DISABLED: only virtual member functions can be marked 'override'", "notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3342,7 +3342,7 @@
"description": "AzerothCore module that adds a random attribute book.", "description": "AzerothCore module that adds a random attribute book.",
"type": "cpp", "type": "cpp",
"category": "admin", "category": "admin",
"notes": "DISABLED: 'OnLogin' marked 'override' but does not override", "notes": "DISABLED: 'OnLogin' marked 'override' but does not override \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3358,7 +3358,7 @@
"description": "AzerothCore module that allows prestige at max level.", "description": "AzerothCore module that allows prestige at max level.",
"type": "cpp", "type": "cpp",
"category": "progression", "category": "progression",
"notes": "DISABLED: use of undeclared identifier 'sSpellMgr'", "notes": "DISABLED: use of undeclared identifier 'sSpellMgr' \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3405,7 +3405,7 @@
"description": "An Azeroth Core module that adds alternative XP gains.", "description": "An Azeroth Core module that adds alternative XP gains.",
"type": "cpp", "type": "cpp",
"category": "progression", "category": "progression",
"notes": "DISABLED: only virtual member functions can be marked 'override'", "notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3437,7 +3437,7 @@
"description": "Ensures all party members have matching faction before queueing into battleground.", "description": "Ensures all party members have matching faction before queueing into battleground.",
"type": "cpp", "type": "cpp",
"category": "pvp", "category": "pvp",
"notes": "DISABLED: only virtual member functions can be marked 'override'", "notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3484,7 +3484,7 @@
"description": "AzerothCore custom module which allows filtering traded items", "description": "AzerothCore custom module which allows filtering traded items",
"type": "cpp", "type": "cpp",
"category": "economy", "category": "economy",
"notes": "DISABLED: only virtual member functions can be marked 'override'", "notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3500,7 +3500,7 @@
"description": "This module allows you to search for quests by ID, which gives you greater precision in your search.", "description": "This module allows you to search for quests by ID, which gives you greater precision in your search.",
"type": "cpp", "type": "cpp",
"category": "scripting", "category": "scripting",
"notes": "DISABLED: only virtual member functions can be marked 'override'", "notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3516,7 +3516,7 @@
"description": "PvPScript for Azerothcore", "description": "PvPScript for Azerothcore",
"type": "cpp", "type": "cpp",
"category": "pvp", "category": "pvp",
"notes": "DISABLED: no member named 'SendNotification' in 'WorldSession'", "notes": "DISABLED: no member named 'SendNotification' in 'WorldSession' \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3532,7 +3532,7 @@
"description": "Broadcasts items with the ITEM_FLAG_REPORT_TO_GUILD_CHAT flag to guild chat.", "description": "Broadcasts items with the ITEM_FLAG_REPORT_TO_GUILD_CHAT flag to guild chat.",
"type": "cpp", "type": "cpp",
"category": "scripting", "category": "scripting",
"notes": "DISABLED: only virtual member functions can be marked 'override'", "notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3548,7 +3548,7 @@
"description": "Fixes FFA for safe zones.", "description": "Fixes FFA for safe zones.",
"type": "cpp", "type": "cpp",
"category": "content", "category": "content",
"notes": "DISABLED: only virtual member functions can be marked 'override'", "notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3564,7 +3564,7 @@
"description": "AzerothCore Interconnect", "description": "AzerothCore Interconnect",
"type": "cpp", "type": "cpp",
"category": "tooling", "category": "tooling",
"notes": "DISABLED: no member named 'StringFormatFmt' in namespace 'Acore'", "notes": "DISABLED: no member named 'StringFormatFmt' in namespace 'Acore' \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3708,7 +3708,7 @@
"description": "Module for Azerothcore to teleport players to with a command", "description": "Module for Azerothcore to teleport players to with a command",
"type": "cpp", "type": "cpp",
"category": "quality-of-life", "category": "quality-of-life",
"notes": "DISABLED: only virtual member functions can be marked 'override'", "notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3739,7 +3739,7 @@
"description": "Adds a re-roll item for StatBooster bonus stat.", "description": "Adds a re-roll item for StatBooster bonus stat.",
"type": "cpp", "type": "cpp",
"category": "rewards", "category": "rewards",
"notes": "DISABLED: 'StatBoostMgr.h' file not found", "notes": "DISABLED: 'StatBoostMgr.h' file not found \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3835,7 +3835,7 @@
"description": "All-In-One Solution module to easily enable features for new players", "description": "All-In-One Solution module to easily enable features for new players",
"type": "cpp", "type": "cpp",
"category": "progression", "category": "progression",
"notes": "DISABLED: no member named 'getLevel'; did you mean 'GetLevel'?", "notes": "DISABLED: no member named 'getLevel'; did you mean 'GetLevel'? \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3851,7 +3851,7 @@
"description": "Custom scipts and modules for Azerothcore", "description": "Custom scipts and modules for Azerothcore",
"type": "cpp", "type": "cpp",
"category": "scripting", "category": "scripting",
"notes": "DISABLED: no member named 'PQuery' / 'outString' in Log", "notes": "DISABLED: no member named 'PQuery' / 'outString' in Log \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3899,7 +3899,7 @@
"description": "", "description": "",
"type": "cpp", "type": "cpp",
"category": "content", "category": "content",
"notes": "DISABLED: Missing config identifier", "notes": "DISABLED: Missing config identifier \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3915,7 +3915,7 @@
"description": "Pushes server and player events into an influxdb instance.", "description": "Pushes server and player events into an influxdb instance.",
"type": "cpp", "type": "cpp",
"category": "scripting", "category": "scripting",
"notes": "DISABLED: Build fails - requires CURL library (missing: CURL_LIBRARY CURL_INCLUDE_DIR)", "notes": "DISABLED: Build fails - requires CURL library (missing: CURL_LIBRARY CURL_INCLUDE_DIR) \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"block_reason": "CMake Error: Could NOT find CURL", "block_reason": "CMake Error: Could NOT find CURL",
"order": 5000, "order": 5000,
@@ -3931,7 +3931,7 @@
"description": "Spell Regulator module for AzerothCore", "description": "Spell Regulator module for AzerothCore",
"type": "cpp", "type": "cpp",
"category": "scripting", "category": "scripting",
"notes": "DISABLED: redefinition of 'AddSpellRegulatorScripts'", "notes": "DISABLED: redefinition of 'AddSpellRegulatorScripts' \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3947,7 +3947,7 @@
"description": "Module for Azerothcore", "description": "Module for Azerothcore",
"type": "cpp", "type": "cpp",
"category": "progression", "category": "progression",
"notes": "DISABLED: 'ChatHandler' is an incomplete type", "notes": "DISABLED: 'ChatHandler' is an incomplete type \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -3979,7 +3979,7 @@
"description": "WIP reward system for AC ingame shop", "description": "WIP reward system for AC ingame shop",
"type": "cpp", "type": "cpp",
"category": "economy", "category": "economy",
"notes": "DISABLED: API incompatibility - ConfigMgr missing GetIntDefault method", "notes": "DISABLED: API incompatibility - ConfigMgr missing GetIntDefault method \nDiscovered via GitHub topic 'azerothcore-module'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -4149,7 +4149,7 @@
"description": "ACore CMS based on Wordpress", "description": "ACore CMS based on Wordpress",
"type": "tool", "type": "tool",
"category": "tooling", "category": "tooling",
"notes": "Disabled due to runtime error: Table 'acore_auth.acore_cms_subscriptions' doesn't exist - causes server abort", "notes": "Disabled due to runtime error: Table 'acore_auth.acore_cms_subscriptions' doesn't exist - causes server abort \nDiscovered via GitHub topic 'azerothcore-tools'",
"status": "blocked", "status": "blocked",
"block_reason": "Runtime error: Missing database table", "block_reason": "Runtime error: Missing database table",
"order": 5000, "order": 5000,
@@ -5065,7 +5065,7 @@
"description": "", "description": "",
"type": "lua", "type": "lua",
"category": "scripting", "category": "scripting",
"notes": "DISABLED: Git clone fails with 'unknown switch E' error - likely due to hyphen in repo name", "notes": "DISABLED: Git clone fails with 'unknown switch E' error - likely due to hyphen in repo name \nDiscovered via GitHub topic 'azerothcore-lua'",
"status": "blocked", "status": "blocked",
"block_reason": "Git clone error: unknown switch 'E'", "block_reason": "Git clone error: unknown switch 'E'",
"order": 5000, "order": 5000,
@@ -5233,7 +5233,7 @@
"description": "This module adds thematically appropriate green and blue loot drops to ALL 450 Classic rares in Kalimdor and the Eastern Kingdoms.", "description": "This module adds thematically appropriate green and blue loot drops to ALL 450 Classic rares in Kalimdor and the Eastern Kingdoms.",
"type": "sql", "type": "sql",
"category": "database", "category": "database",
"notes": "DISABLED: only virtual member functions can be marked 'override'", "notes": "DISABLED: only virtual member functions can be marked 'override' \nDiscovered via GitHub topic 'azerothcore-sql'",
"status": "blocked", "status": "blocked",
"order": 5000, "order": 5000,
"requires": [], "requires": [],
@@ -5391,6 +5391,132 @@
"post_install_hooks": [], "post_install_hooks": [],
"config_cleanup": [], "config_cleanup": [],
"last_modified": "2020-12-16T18:26:39Z" "last_modified": "2020-12-16T18:26:39Z"
},
{
"key": "MODULE_MOD_BOTS_LOGIN_FIX",
"name": "mod-bots-login-fix",
"repo": "https://github.com/BeardBear33/mod-bots-login-fix.git",
"description": "Oprava duplicitn\u00edho p\u0159ipojen\u00ed na AltBota pro modul Playerbots. // Fix for duplicate connections to an AltBot for the Playerbots module.",
"type": "cpp",
"category": "uncategorized",
"notes": "Discovered via GitHub topic 'azerothcore-module'",
"status": "active",
"order": 5000,
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
},
{
"key": "MODULE_MOD_MATERIAL_BANK",
"name": "mod-material-bank",
"repo": "https://github.com/BeardBear33/mod-material-bank.git",
"description": "Module for WoW 3.3.5a (AzerothCore \u2013 Playerbots). Tested on Ubuntu.",
"type": "cpp",
"category": "uncategorized",
"notes": "Discovered via GitHub topic 'azerothcore-module'",
"status": "active",
"order": 5000,
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
},
{
"key": "MODULE_MOD_PROGRESSION_BLIZZLIKE",
"name": "mod-progression-blizzlike",
"repo": "https://github.com/kambire/mod-progression-blizzlike.git",
"description": "Modular progression system for AzerothCore built around brackets (Vanilla/TBC/WotLK + Arena seasons). Each enabled bracket can load its own C++ scripts and SQL updates.",
"type": "cpp",
"category": "uncategorized",
"notes": "Discovered via GitHub topic 'azerothcore-module'",
"status": "active",
"order": 5000,
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
},
{
"key": "MODULE_MOD_PYTHON_ENGINE",
"name": "mod-python-engine",
"repo": "https://github.com/privatecore/mod-python-engine.git",
"description": "A Python Scripting Engine module for AzerothCore",
"type": "cpp",
"category": "uncategorized",
"notes": "Discovered via GitHub topic 'azerothcore-module'",
"status": "active",
"order": 5000,
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
},
{
"key": "MODULE_WRATH_OF_THE_VANILLA_V2",
"name": "wrath-of-the-vanilla-v2",
"repo": "https://github.com/Hextv/wrath-of-the-vanilla-v2.git",
"description": "Project that focuses on turning a server running AzerothCore into one limited to the original (vanilla) content.",
"type": "sql",
"category": "database",
"notes": "Discovered via GitHub topic 'azerothcore-module'",
"status": "active",
"order": 5000,
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
},
{
"key": "MODULE_AZEROTHMCP",
"name": "azerothMCP",
"repo": "https://github.com/blinkysc/azerothMCP.git",
"description": "MCP server for AzerothCore",
"type": "tool",
"category": "tooling",
"notes": "Discovered via GitHub topic 'azerothcore-tools' Not directly related to runtime functionality of AzerothCore",
"status": "blocked",
"order": 5000,
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
},
{
"key": "MODULE_AZEROTHCORE_REALMMASTER",
"name": "AzerothCore-RealmMaster",
"repo": "https://github.com/uprightbass360/AzerothCore-RealmMaster.git",
"description": "Automated AzerothCore docker stack with tooling. ",
"type": "tool",
"category": "tooling",
"notes": "Discovered via GitHub topic 'azerothcore-tools'",
"status": "blocked",
"order": 5000,
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
},
{
"key": "MODULE_DUELS",
"name": "Duels",
"repo": "https://github.com/VikasMahajan370/Duels.git",
"description": "\u2694\ufe0f Experience engaging duels with a feature-rich plugin for Paper 1.21+ servers, featuring 1.8 Legacy combat, matchmaking, custom kits, and more.",
"type": "cpp",
"category": "uncategorized",
"notes": "Discovered via GitHub topic 'azerothcore-module'",
"status": "active",
"order": 5000,
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
},
{
"key": "MODULE_WOW_CORE",
"name": "wow-core",
"repo": "https://github.com/Wow-Libre/wow-core.git",
"description": "Complete management platform for private World of Warcraft servers.",
"type": "tool",
"category": "tooling",
"notes": "Discovered via GitHub topic 'azerothcore-tools'",
"status": "active",
"order": 5000,
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
} }
] ]
} }

View File

@@ -22,7 +22,6 @@
"MODULE_ASSISTANT", "MODULE_ASSISTANT",
"MODULE_REAGENT_BANK", "MODULE_REAGENT_BANK",
"MODULE_BLACK_MARKET_AUCTION_HOUSE", "MODULE_BLACK_MARKET_AUCTION_HOUSE",
"MODULE_ELUNA_TS",
"MODULE_ELUNA", "MODULE_ELUNA",
"MODULE_AIO", "MODULE_AIO",
"MODULE_ELUNA_SCRIPTS", "MODULE_ELUNA_SCRIPTS",

View File

@@ -1,8 +1,8 @@
{ {
"modules": [ "modules": [
"MODULE_ELUNA"
], ],
"label": "\u2b50 AzerothCore Main - Mod Free", "label": "\ud83d\udd30 AzerothCore Main - Mod Free",
"description": "Pure AzerothCore with no optional modules enabled", "description": "Pure AzerothCore with no optional modules enabled",
"order": 3 "order": 3
} }

View File

@@ -1,8 +1,7 @@
{ {
"modules": [ "modules": [
"MODULE_PLAYERBOTS", "MODULE_PLAYERBOTS",
"MODULE_ELUNA", "MODULE_ELUNA"
"MODULE_ELUNA_TS"
], ],
"label": "\ud83e\udde9 Playerbots Only", "label": "\ud83e\udde9 Playerbots Only",
"description": "Minimal preset that only enables playerbot prerequisites", "description": "Minimal preset that only enables playerbot prerequisites",

View File

@@ -8,11 +8,10 @@
"MODULE_NPC_BUFFER", "MODULE_NPC_BUFFER",
"MODULE_LEARN_SPELLS", "MODULE_LEARN_SPELLS",
"MODULE_FIREWORKS", "MODULE_FIREWORKS",
"MODULE_ELUNA_TS",
"MODULE_ELUNA", "MODULE_ELUNA",
"MODULE_AIO" "MODULE_AIO"
], ],
"label": "\ud83e\udd16 Playerbots + Suggested modules", "label": "\ud83e\udd16 Suggested modules (Playerbots)",
"description": "Suggested stack plus playerbots enabled", "description": "Suggested stack plus playerbots enabled",
"order": 1 "order": 1
} }

View File

@@ -1,6 +1,5 @@
{ {
"modules": [ "modules": [
"MODULE_ELUNA_TS",
"MODULE_ELUNA", "MODULE_ELUNA",
"MODULE_AIO", "MODULE_AIO",
"MODULE_SOLO_LFG", "MODULE_SOLO_LFG",
@@ -11,7 +10,7 @@
"MODULE_LEARN_SPELLS", "MODULE_LEARN_SPELLS",
"MODULE_FIREWORKS" "MODULE_FIREWORKS"
], ],
"label": "\u2b50 Suggested Modules", "label": "\u2b50 Suggested Modules (Main)",
"description": "Baseline solo-friendly quality of life mix (no playerbots)", "description": "Baseline solo-friendly quality of life mix (no playerbots)",
"order": 2 "order": 2
} }

View File

@@ -25,6 +25,8 @@ services:
MYSQL_MAX_CONNECTIONS: ${MYSQL_MAX_CONNECTIONS}
MYSQL_INNODB_BUFFER_POOL_SIZE: ${MYSQL_INNODB_BUFFER_POOL_SIZE}
MYSQL_INNODB_LOG_FILE_SIZE: ${MYSQL_INNODB_LOG_FILE_SIZE}
MYSQL_BINLOG_EXPIRE_LOGS_SECONDS: 86400
MYSQL_DISABLE_BINLOG: ${MYSQL_DISABLE_BINLOG}
TZ: "${TZ}"
entrypoint:
- /usr/local/bin/mysql-entrypoint.sh
@@ -46,6 +48,10 @@ services:
- --innodb-buffer-pool-size=${MYSQL_INNODB_BUFFER_POOL_SIZE}
- --innodb-log-file-size=${MYSQL_INNODB_LOG_FILE_SIZE}
- --innodb-redo-log-capacity=${MYSQL_INNODB_REDO_LOG_CAPACITY}
- --expire_logs_days=0
- --binlog_expire_logs_seconds=86400
- --binlog_expire_logs_auto_purge=ON
stop_grace_period: 2m
restart: unless-stopped
logging: *logging-default
healthcheck:

View File

@@ -152,7 +152,7 @@ storage/
├── client-data/ # Unpacked WoW client data & DBC overrides ├── client-data/ # Unpacked WoW client data & DBC overrides
├── logs/ # Server log files ├── logs/ # Server log files
├── modules/ # Downloaded module source code ├── modules/ # Downloaded module source code
├── lua_scripts/ # Eluna Lua scripts (auto-loaded) ├── lua_scripts/ # ALE Lua scripts (auto-loaded)
├── install-markers/ # Module installation state tracking ├── install-markers/ # Module installation state tracking
└── backups/ # Automated database backups └── backups/ # Automated database backups
├── daily/ # Daily backups (retained per BACKUP_RETENTION_DAYS) ├── daily/ # Daily backups (retained per BACKUP_RETENTION_DAYS)
@@ -190,6 +190,26 @@ The build system is optimized for development and production deployments with Do
- Build artifact caching for faster rebuilds
- Support for custom patches and modifications
### Module Build Source Path
**`MODULES_REBUILD_SOURCE_PATH`** - Path to AzerothCore source used for C++ module compilation.
**Default:** `${STORAGE_PATH_LOCAL}/source/azerothcore`
Auto-selects the appropriate fork:
- Playerbots enabled → `./local-storage/source/azerothcore-playerbots`
- Standard build → `./local-storage/source/azerothcore`
**Custom Override:**
```bash
MODULES_REBUILD_SOURCE_PATH=/path/to/custom/azerothcore
```
**Notes:**
- Must be a valid AzerothCore git repository
- Cannot be inside `STORAGE_PATH` (performance)
- Auto-managed by `setup-source.sh` and `rebuild-with-modules.sh`
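As a rough sketch of the auto-selection described above (illustrative only; the real logic lives in `setup-source.sh` and `rebuild-with-modules.sh` and may differ in detail):
```bash
# Hypothetical helper mirroring the documented behaviour, not the shipped script.
resolve_module_source_path() {
  if [ -n "${MODULES_REBUILD_SOURCE_PATH:-}" ]; then
    echo "$MODULES_REBUILD_SOURCE_PATH"                           # explicit override wins
  elif [ "${STACK_SOURCE_VARIANT:-core}" = "playerbots" ]; then
    echo "${STORAGE_PATH_LOCAL}/source/azerothcore-playerbots"    # playerbots fork
  else
    echo "${STORAGE_PATH_LOCAL}/source/azerothcore"               # standard fork
  fi
}
```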
## Custom Configuration
Advanced customization options for specialized deployments and development environments.

321
docs/CICD.md Normal file
View File

@@ -0,0 +1,321 @@
# CI/CD Documentation
This document describes the continuous integration and deployment workflows configured for the AzerothCore RealmMaster project.
## Build and Publish Workflow
The `build-and-publish.yml` workflow automatically builds AzerothCore with your configured modules and publishes Docker images to Docker Hub.
### Trigger Schedule
- **Nightly builds**: Runs automatically at 2 AM UTC every day
- **Manual trigger**: Can be triggered manually via GitHub Actions UI with optional force rebuild
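In workflow YAML terms, the trigger block presumably looks something like the sketch below; the input names are an assumption based on the manual-trigger instructions later in this document:
```yaml
# Assumed shape of the triggers in .github/workflows/build-and-publish.yml
on:
  schedule:
    - cron: '0 2 * * *'          # nightly at 2 AM UTC
  workflow_dispatch:
    inputs:
      module_profile:
        description: 'Profile from config/module-profiles/ to build'
        default: 'RealmMaster'
      force_rebuild:
        description: 'Rebuild even if no changes are detected'
        type: boolean
        default: false
```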
### What It Does
1. **Checks out the repository** - Gets the RealmMaster project code
2. **Sets up Git** - Configures git for module repository cloning
3. **Sets up Docker Buildx** - Enables optimized Docker builds
4. **Logs in to Docker Hub** - Authenticates for image publishing
5. **Prepares the build environment**:
- Runs `./setup.sh --non-interactive --module-config RealmMaster --force`
- Uses the same setup process as local builds (ensures consistency)
- Applies the **RealmMaster module profile** from `config/module-profiles/RealmMaster.json`
- Creates `.env` with proper paths and configured modules (32 modules)
- Automatically selects correct source variant (standard or playerbots)
6. **Caches build artifacts** to speed up subsequent builds:
- Go build cache (`.gocache`)
- Source repository (`local-storage/source`)
7. **Sets up Python 3.11** - Required for module management scripts
8. **Runs `./build.sh --yes`** - This is where the magic happens:
- **Step 1**: Sets up the AzerothCore source repository
- **Step 2**: Detects build requirements
- **Step 3**: Syncs module metadata
- **Step 4**: **Fetches all module repositories** - Automatically clones all 32 enabled module repos from GitHub
- **Step 5**: **Compiles AzerothCore** with all fetched modules integrated
- **Step 6**: Tags the compiled images
9. **Tags images for Docker Hub** - Prepares `latest` and date-based tags
10. **Pushes images to Docker Hub** - Publishes the built images
11. **Generates a build summary** - Shows enabled modules and published images
### Module Fetching Process
The workflow **automatically fetches all module repositories** during the build. Here's how it works:
- The `build.sh` script reads the enabled modules from `.env` (set by the RealmMaster profile)
- For each enabled module, it clones the repository from GitHub (all modules are public repos)
- Module repositories are cloned into the AzerothCore source tree under `modules/`
- Examples of fetched repositories:
- `mod-playerbots` from https://github.com/mod-playerbots/mod-playerbots.git
- `mod-transmog` from https://github.com/azerothcore/mod-transmog.git
- `mod-solo-lfg` from https://github.com/azerothcore/mod-solo-lfg.git
- ...and 29 more
**No manual module setup required!** The build process handles everything automatically.
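A minimal sketch of that fetch loop, assuming a helper that maps a module key to its repository URL from `config/module-manifest.json` (the real implementation is in `build.sh`/`manage-modules.sh` and is more involved):
```bash
# Conceptual sketch only - not the shipped script.
modules_dir="local-storage/source/azerothcore-playerbots/modules"

# Every MODULE_*=1 flag written to .env by the profile marks a module to fetch.
grep -E '^MODULE_[A-Z0-9_]+=1$' .env | cut -d= -f1 | while read -r key; do
  repo=$(lookup_repo_for "$key")        # hypothetical helper: key -> repo URL from the manifest
  name=$(basename "$repo" .git)
  if [ ! -d "$modules_dir/$name" ]; then
    git clone --depth 1 "$repo" "$modules_dir/$name"   # clone into the source tree's modules/
  fi
done
```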
### Published Images
The workflow publishes images with **profile-specific tags** so you know exactly which modules are included:
**Profile-Tagged Images** (recommended):
- `<dockerhub-username>/azerothcore-realmmaster:authserver-realmmaster-latest` ✅ Built nightly
- `<dockerhub-username>/azerothcore-realmmaster:authserver-realmmaster-YYYYMMDD` ✅ Built nightly
- `<dockerhub-username>/azerothcore-realmmaster:worldserver-realmmaster-latest` ✅ Built nightly
- `<dockerhub-username>/azerothcore-realmmaster:worldserver-realmmaster-YYYYMMDD` ✅ Built nightly
**Generic Tags** (backward compatibility, defaults to RealmMaster profile):
- `<dockerhub-username>/azerothcore-realmmaster:authserver-latest` ✅ Built nightly
- `<dockerhub-username>/azerothcore-realmmaster:worldserver-latest` ✅ Built nightly
**Other Profile Tags** (built on-demand via manual workflow trigger):
- `authserver-suggested-modules-latest` - Available when built
- `authserver-all-modules-latest` - Available when built
- `authserver-playerbots-only-latest` - Available when built
**Note**: Only the RealmMaster profile is built automatically on schedule. Other profiles can be built by manually triggering the workflow with different profile names.
## Required GitHub Secrets
To enable the build and publish workflow, you must configure the following secrets in your GitHub repository:
### Setting Up Secrets
1. Go to your GitHub repository
2. Click **Settings****Secrets and variables****Actions**
3. Click **New repository secret**
4. Add the following secrets:
#### DOCKERHUB_USERNAME
Your Docker Hub username.
**Example**: `yourusername`
#### DOCKERHUB_TOKEN
A Docker Hub access token (recommended) or your Docker Hub password.
**How to create a Docker Hub access token**:
1. Log in to [Docker Hub](https://hub.docker.com/)
2. Click on your username in the top right → **Account Settings**
3. Go to **Security****Personal Access Tokens****Generate New Token**
4. Give it a description (e.g., "GitHub Actions")
5. Set permissions: **Read & Write**
6. Click **Generate**
7. Copy the token (you won't be able to see it again)
8. Add this token as the `DOCKERHUB_TOKEN` secret in GitHub
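Inside the workflow, these two secrets feed the Docker Hub login step. Assuming the standard `docker/login-action` (the actual workflow may differ), that step looks roughly like:
```yaml
# Assumed login step - shown only to illustrate how the secrets are consumed
- name: Log in to Docker Hub
  uses: docker/login-action@v3
  with:
    username: ${{ secrets.DOCKERHUB_USERNAME }}
    password: ${{ secrets.DOCKERHUB_TOKEN }}
```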
## Module Configuration
### Default Profile: RealmMaster
The workflow uses the **RealmMaster** module profile by default, which includes 32 carefully selected modules:
- MODULE_PLAYERBOTS - AI-controlled player characters
- MODULE_TRANSMOG - Transmogrification system
- MODULE_SOLO_LFG - Solo dungeon finder
- MODULE_NPC_BUFFER - Buff NPC
- MODULE_ELUNA - Lua scripting engine
- MODULE_AIO - All-in-one interface
- ...and 26 more modules
See the full list in `config/module-profiles/RealmMaster.json`.
### Customizing the Module Profile
To use a different module profile in the CI/CD workflow:
1. **Choose or create a profile** in `config/module-profiles/`:
- `RealmMaster.json` - Default (32 modules)
- `suggested-modules.json` - Alternative suggested set
- `playerbots-only.json` - Just playerbots
- `all-modules.json` - All supported modules
- Create your own JSON file
2. **Edit the workflow** at `.github/workflows/build-and-publish.yml`:
```yaml
# Change this line in the "Prepare build environment" step:
python3 scripts/python/apply_module_profile.py RealmMaster \
# To use a different profile:
python3 scripts/python/apply_module_profile.py suggested-modules \
```
3. **Update the build summary** (optional):
```yaml
# Change this line in the "Build summary" step:
echo "- **Module Profile**: RealmMaster" >> $GITHUB_STEP_SUMMARY
# To:
echo "- **Module Profile**: suggested-modules" >> $GITHUB_STEP_SUMMARY
```
### Testing Module Profiles Locally
You can test the module profile script locally before committing:
```bash
# List modules that will be enabled
python3 scripts/python/apply_module_profile.py RealmMaster --list-modules
# Apply a profile to create .env
python3 scripts/python/apply_module_profile.py RealmMaster
# Verify the result
grep '^MODULE_.*=1' .env | wc -l
```
## Cache Strategy
The workflow uses GitHub Actions cache to speed up builds:
- **Go build cache**: Cached in `.gocache` directory
- **Source repository**: Cached in `local-storage/source` directory
This significantly reduces build times for subsequent runs.
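A sketch of what such a cache step can look like, using the paths named above (the cache key is illustrative; the real workflow may compute it differently):
```yaml
# Illustrative cache step - adjust key/paths to match the actual workflow
- name: Cache build artifacts
  uses: actions/cache@v4
  with:
    path: |
      .gocache
      local-storage/source
    key: realmmaster-build-${{ runner.os }}-${{ hashFiles('config/module-profiles/RealmMaster.json') }}
    restore-keys: |
      realmmaster-build-${{ runner.os }}-
```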
## Manual Workflow Trigger
To manually trigger the workflow:
1. Go to **Actions** tab in your GitHub repository
2. Click on **Build and Publish** workflow
3. Click **Run workflow**
4. **Choose module profile** (default: RealmMaster):
- Enter profile name (e.g., `RealmMaster`, `suggested-modules`, `all-modules`, `playerbots-only`)
- Profile must exist in `config/module-profiles/`
5. Optionally check **Force rebuild** to rebuild even if no changes detected
6. Click **Run workflow**
The workflow will build with the selected profile and tag images accordingly (e.g., `authserver-realmmaster-latest` for RealmMaster profile).
## Troubleshooting
### Build fails with "missing required command"
The workflow runs on GitHub's Ubuntu runners, which ship with Docker preinstalled, and installs Python 3.11 through its setup step. If you see missing-command errors, verify that any additional dependencies required by the build scripts are available on the runner.
### Authentication errors
If you see Docker Hub authentication errors:
- Verify `DOCKERHUB_USERNAME` and `DOCKERHUB_TOKEN` secrets are set correctly
- Ensure the Docker Hub token has **Read & Write** permissions
- Check that the token hasn't expired
### Build timeout
The workflow has a 120-minute timeout. If builds consistently exceed this:
- Consider optimizing the build process
- Check if all module sources are accessible
- Review cache effectiveness
## Using Pre-Built Images
After images are published to Docker Hub, users can deploy RealmMaster **without building locally**!
### For End Users
See the complete guide at **[docs/PREBUILT_IMAGES.md](PREBUILT_IMAGES.md)** for step-by-step instructions.
**Quick start for users**:
```bash
# Clone the repository
git clone https://github.com/uprightbass360/AzerothCore-RealmMaster.git
cd AzerothCore-RealmMaster
# Use pre-built configuration
cp .env.prebuilt .env
# Edit .env and set DOCKERHUB_USERNAME=your-dockerhub-username
# Deploy (no build required!)
./deploy.sh
```
### For Developers
To test the published images:
```bash
# Pull latest RealmMaster profile images
docker pull <dockerhub-username>/azerothcore-realmmaster:authserver-realmmaster-latest
docker pull <dockerhub-username>/azerothcore-realmmaster:worldserver-realmmaster-latest
# Or pull specific date-tagged images
docker pull <dockerhub-username>/azerothcore-realmmaster:authserver-realmmaster-20260109
docker pull <dockerhub-username>/azerothcore-realmmaster:worldserver-realmmaster-20260109
# Or use generic latest tags (defaults to RealmMaster profile)
docker pull <dockerhub-username>/azerothcore-realmmaster:authserver-latest
docker pull <dockerhub-username>/azerothcore-realmmaster:worldserver-latest
```
### Pre-Built Configuration File
The `.env.prebuilt` template provides a minimal configuration that:
- References Docker Hub images instead of local builds
- Removes all build-related variables
- Includes only runtime configuration
- Is ready to use with minimal editing (just set DOCKERHUB_USERNAME)
**Benefits of pre-built images**:
- ✅ Skip 15-45 minute build time
- ✅ No build dependencies required
- ✅ Same 32 RealmMaster modules included
- ✅ Automatic nightly updates available
- ✅ Date-tagged versions for stability
- ✅ Profile-tagged images for clear identification
## Building Multiple Profiles
You can build different module profiles by manually triggering the workflow:
### Example: Build All Modules Profile
1. Go to **Actions** → **Build and Publish**
2. Click **Run workflow**
3. Set **module_profile** to `all-modules`
4. Click **Run workflow**
This will create:
- `authserver-all-modules-latest`
- `authserver-all-modules-YYYYMMDD`
- `worldserver-all-modules-latest`
- `worldserver-all-modules-YYYYMMDD`
### Creating Custom Profile Builds
To build a custom profile:
1. **Create profile JSON** in `config/module-profiles/my-custom-profile.json`:
```json
{
"modules": [
"MODULE_PLAYERBOTS",
"MODULE_TRANSMOG",
"MODULE_SOLO_LFG"
],
"label": "My Custom Profile",
"description": "Custom module selection",
"order": 100
}
```
2. **Trigger workflow** with profile name `my-custom-profile`
3. **Images created**:
- `authserver-my-custom-profile-latest`
- `worldserver-my-custom-profile-latest`
### Scheduled Builds
The nightly scheduled build always uses the **RealmMaster** profile. To schedule builds for different profiles, you can:
1. Create additional workflow files (e.g., `.github/workflows/build-all-modules.yml`)
2. Set different cron schedules
3. Hardcode the profile name in the workflow
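A hedged example of those steps, assuming the existing workflow file is named `build-and-publish.yml` (adjust to your actual filename):
```bash
# Clone the existing workflow as a starting point (source filename is an assumption)
cp .github/workflows/build-and-publish.yml .github/workflows/build-all-modules.yml
# Then edit the new file to:
#   - change the cron expression under on.schedule
#   - hardcode the module profile to all-modules in the build step
```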

@@ -187,6 +187,8 @@ Because MySQL stores its hot data in a tmpfs (`/var/lib/mysql-runtime`) while pe
- If **any tables exist**, the script logs `Backup restoration completed successfully` and skips the expensive restore just as before.
- If **no tables are found or the query fails**, the script logs `Restoration marker found, but databases are empty - forcing re-import`, automatically clears the stale marker, and reruns the backup restore + `dbimport` pipeline so services always start with real data.
On graceful shutdown, the MySQL container now syncs the tmpfs datadir back into `/var/lib/mysql-persistent` so a normal restart keeps the latest state. Unclean shutdowns (host reboot, OOM kill) can still lose recent changes, so the backup restore path remains the safety net.
To complement that one-shot safety net, the long-running `ac-db-guard` service now watches the runtime tmpfs. It polls MySQL, and if it ever finds those schemas empty (the usual symptom after a daemon restart), it automatically reruns `db-import-conditional.sh` to rehydrate from the most recent backup before marking itself healthy. All auth/world services now depend on `ac-db-guard`'s health check, guaranteeing that AzerothCore never boots without real tables in memory. The guard also mounts the working SQL tree from `local-storage/source/azerothcore-playerbots/data/sql` into the db containers so that every `dbimport` run uses the exact SQL that matches your checked-out source, even if the Docker image was built earlier.
Because new features sometimes require schema changes even when the databases already contain data, `ac-db-guard` now performs a `dbimport` verification sweep (configurable via `DB_GUARD_VERIFY_INTERVAL_SECONDS`) to proactively apply any outstanding updates from the mounted SQL tree. By default it runs once per bootstrap and then every 24 hours, so the auth/world servers always see the columns/tables expected by their binaries without anyone having to run host scripts manually.
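A minimal sketch of the guard's core loop, assuming the container and database names described elsewhere in this repo (the real service differs in detail):
```bash
# Hedged sketch of the ac-db-guard watch loop (names and paths are illustrative)
while true; do
  empty=0
  for db in acore_auth acore_characters acore_world; do
    count=$(mysql -h ac-mysql -uroot -p"$MYSQL_ROOT_PASSWORD" -N -e \
      "SELECT COUNT(*) FROM information_schema.tables WHERE table_schema='$db';" 2>/dev/null || echo 0)
    [ "${count:-0}" -eq 0 ] && empty=1
  done
  if [ "$empty" -eq 1 ]; then
    echo "Empty schema detected - rehydrating from latest backup"
    ./db-import-conditional.sh   # path assumed; reruns the restore + dbimport pipeline
  fi
  sleep 60
done
```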

@@ -158,7 +158,7 @@ The module collection is organized into the following categories:
| **[eluna-scripts](https://github.com/Isidorsson/Eluna-scripts.git)** | Collection of Lua scripts for creating custom gameplay mechanics and features |
| **[eluna-ts](https://github.com/azerothcore/eluna-ts.git)** | Adds a TS-to-Lua workflow so Eluna scripts can be authored with modern tooling |
| **[mod-aio](https://github.com/Rochet2/AIO.git)** | Pure Lua server-client communication system for bidirectional data transmission |
| **[mod-ale](https://github.com/azerothcore/mod-ale.git)** | ALE (AzerothCore Lua Engine) - Lua scripting engine for custom gameplay mechanics (formerly Eluna) |
## Admin Tools

@@ -142,7 +142,7 @@ MODULES_ENABLED="mod-playerbots mod-aoe-loot ..."
**What Gets Built:**
- AzerothCore with playerbots branch
- 93 modules compiled and integrated in this run (current manifest: 348 total / 221 supported)
- Custom Docker images: `${COMPOSE_PROJECT_NAME}:worldserver-modules-latest` etc.
### Deployment Status: READY TO DEPLOY 🚀

docs/PREBUILT_IMAGES.md Normal file
@@ -0,0 +1,339 @@
# Deploying Pre-Built RealmMaster Images
This guide explains how to deploy AzerothCore RealmMaster using pre-built Docker images from Docker Hub. **No local building required!**
## What's Included in Pre-Built Images
The pre-built images are automatically built nightly with the **RealmMaster module profile**, which includes **32 carefully selected modules**:
- **MODULE_PLAYERBOTS** - AI-controlled player characters
- **MODULE_TRANSMOG** - Transmogrification system
- **MODULE_SOLO_LFG** - Solo dungeon finder
- **MODULE_ELUNA** - Lua scripting engine
- **MODULE_AIO** - All-in-one interface
- **MODULE_NPC_BUFFER** - Buff NPC
- **MODULE_NPC_BEASTMASTER** - Pet management
- **MODULE_SOLOCRAFT** - Solo dungeon scaling
- **MODULE_1V1_ARENA** - 1v1 arena system
- **MODULE_ACCOUNT_ACHIEVEMENTS** - Account-wide achievements
- ...and 22 more modules!
See `config/module-profiles/RealmMaster.json` for the complete list.
## Prerequisites
- Docker with Docker Compose v2
- 16GB+ RAM
- 64GB+ storage
- Linux/macOS/WSL2
## Quick Start
### 1. Clone the Repository
```bash
git clone https://github.com/uprightbass360/AzerothCore-RealmMaster.git
cd AzerothCore-RealmMaster
```
### 2. Create Configuration File
```bash
# Copy the pre-built images template
cp .env.prebuilt .env
```
### 3. Configure Docker Hub Username
Edit `.env` and set your Docker Hub username:
```bash
# Change this line:
DOCKERHUB_USERNAME=your-dockerhub-username
# To (example):
DOCKERHUB_USERNAME=uprightbass360
```
### 4. Optional: Customize Settings
Edit `.env` to customize:
- **Server address**: `SERVER_ADDRESS=your-server-ip`
- **Passwords**: `MYSQL_ROOT_PASSWORD=your-password`
- **Playerbot population**: `PLAYERBOT_MIN_BOTS` and `PLAYERBOT_MAX_BOTS`
- **Server preset**: `SERVER_CONFIG_PRESET=fast-leveling` (or blizzlike, hardcore-pvp, casual-pve)
### 5. Deploy
```bash
./deploy.sh
```
The deployment will:
- Pull pre-built images from Docker Hub
- Set up MySQL database with all module SQL
- Configure client data
- Start all services
**First deployment takes 30-60 minutes** for database setup and client data download.
## Image Tags
The CI/CD workflow publishes images with **profile-specific tags** so you know exactly which modules are included:
### Profile-Tagged Images (Recommended)
Each module profile gets its own tag:
- **`:authserver-realmmaster-latest`** - RealmMaster profile (32 modules)
- **`:worldserver-realmmaster-latest`** - RealmMaster profile (32 modules)
- **`:authserver-realmmaster-YYYYMMDD`** - Date-tagged RealmMaster builds
- **`:worldserver-realmmaster-YYYYMMDD`** - Date-tagged RealmMaster builds
Other profiles (available when built via GitHub Actions):
- **`:authserver-suggested-modules-latest`** - Suggested modules profile (not yet published)
- **`:authserver-all-modules-latest`** - All modules profile (not yet published)
- **`:authserver-playerbots-only-latest`** - Playerbots only (not yet published)
**Note**: Currently only the RealmMaster profile is built nightly. Other profiles can be built on-demand by manually triggering the CI/CD workflow.
### Generic Tags (Backward Compatibility)
- **`:authserver-latest`** - Latest build (defaults to RealmMaster profile)
- **`:worldserver-latest`** - Latest build (defaults to RealmMaster profile)
### Choosing a Profile
In `.env.prebuilt`, set the `MODULE_PROFILE` variable:
```bash
# Choose your profile
MODULE_PROFILE=realmmaster # 32 modules (default, recommended)
# MODULE_PROFILE=suggested-modules # Alternative module set
# MODULE_PROFILE=all-modules # All supported modules
# MODULE_PROFILE=playerbots-only # Just playerbots
# Images automatically reference the selected profile
AC_AUTHSERVER_IMAGE_MODULES=${DOCKERHUB_USERNAME}/${COMPOSE_PROJECT_NAME}:authserver-${MODULE_PROFILE}-latest
AC_WORLDSERVER_IMAGE_MODULES=${DOCKERHUB_USERNAME}/${COMPOSE_PROJECT_NAME}:worldserver-${MODULE_PROFILE}-latest
```
### Using Date-Tagged Images
To pin to a specific build date, edit `.env`:
```bash
# Set your profile
MODULE_PROFILE=realmmaster
# Pin to a specific date (example: January 9, 2026)
AC_AUTHSERVER_IMAGE_MODULES=${DOCKERHUB_USERNAME}/${COMPOSE_PROJECT_NAME}:authserver-${MODULE_PROFILE}-20260109
AC_WORLDSERVER_IMAGE_MODULES=${DOCKERHUB_USERNAME}/${COMPOSE_PROJECT_NAME}:worldserver-${MODULE_PROFILE}-20260109
```
## Differences from Local Build
### What You DON'T Need
When using pre-built images, you **skip**:
- ❌ Running `./setup.sh` (module selection)
- ❌ Running `./build.sh` (compilation)
- ❌ 15-45 minute build time
- ❌ Build dependencies (Go compiler, etc.)
### What's the Same
Everything else works identically:
- ✅ Database setup and migrations
- ✅ Module SQL installation
- ✅ Configuration management
- ✅ Backup system
- ✅ All management commands
- ✅ phpMyAdmin and Keira3 tools
## Verifying Your Deployment
After deployment completes:
### 1. Check Container Status
```bash
./status.sh
```
You should see all services running:
- ✅ ac-mysql
- ✅ ac-authserver
- ✅ ac-worldserver
- ✅ ac-phpmyadmin
- ✅ ac-keira3
### 2. Verify Modules Are Loaded
Check the worldserver logs:
```bash
docker logs ac-worldserver | grep "module"
```
You should see messages about 32 modules being loaded.
### 3. Access Management Tools
- **phpMyAdmin**: http://localhost:8081
- **Keira3**: http://localhost:4201
## Post-Installation
### Create Admin Account
1. Attach to the worldserver container:
```bash
docker attach ac-worldserver
```
2. Create an account and set GM level:
```
account create admin password
account set gmlevel admin 3 -1
```
3. Detach: Press `Ctrl+P` then `Ctrl+Q`
### Configure Client
Edit your WoW 3.3.5a client's `realmlist.wtf`:
```
set realmlist 127.0.0.1
```
(Replace `127.0.0.1` with your server's IP if remote)
## Updating to Latest Images
To update to the latest nightly build:
```bash
# Pull latest images
docker compose pull
# Restart services
docker compose down
docker compose up -d
```
**Note**: Database schema updates will be applied automatically on restart.
## Switching Between Pre-Built and Local Build
### From Pre-Built to Local Build
If you want to customize modules and build locally:
```bash
# Remove pre-built .env
rm .env
# Run interactive setup
./setup.sh
# Build with your custom modules
./build.sh
# Deploy
./deploy.sh
```
### From Local Build to Pre-Built
If you want to use pre-built images instead:
```bash
# Back up your current .env
mv .env .env.custom
# Use pre-built configuration
cp .env.prebuilt .env
# Edit DOCKERHUB_USERNAME in .env
# Deploy
./deploy.sh
```
## Troubleshooting
### Image Pull Errors
**Problem**: `Error response from daemon: manifest not found`
**Solutions**:
1. Verify `DOCKERHUB_USERNAME` is set correctly in `.env`
2. Check that the images exist at: https://hub.docker.com/u/your-username
3. Ensure the CI/CD workflow has run successfully
### Module SQL Not Applied
**Problem**: Modules don't seem to be working
**Solution**: The module SQL is automatically applied during deployment. Check:
```bash
# Verify module SQL staging
ls -la storage/module-sql-updates/
# Check database for module tables
docker exec -it ac-mysql mysql -uroot -p${MYSQL_ROOT_PASSWORD} -e "SHOW TABLES" acore_world | grep -i module
```
### Performance Issues
**Problem**: Server is slow or laggy
**Solutions**:
1. Increase MySQL tmpfs size in `.env`: `MYSQL_RUNTIME_TMPFS_SIZE=16G`
2. Reduce playerbot population: `PLAYERBOT_MAX_BOTS=100`
3. Check system resources: `docker stats`
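For example, the corresponding `.env` adjustments might look like:
```bash
# Example .env adjustments based on the suggestions above
MYSQL_RUNTIME_TMPFS_SIZE=16G   # give the MySQL tmpfs more headroom
PLAYERBOT_MAX_BOTS=100         # fewer bots means less CPU/RAM pressure
```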
## Advanced Configuration
### Custom Module Selection
Pre-built images include all RealmMaster modules. To disable specific modules:
1. Edit server configuration files in `storage/config/`
2. Set module enable flags to 0
3. Restart worldserver: `docker compose restart ac-worldserver`
**Note**: You can only disable modules, not add new ones (requires local build).
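A hedged example of that flow; the config filename and flag key below are illustrative, since each module defines its own setting:
```bash
# Hypothetical example - the .conf filename and Enable key vary per module
sed -i 's/^AccountAchievements.Enable.*/AccountAchievements.Enable = 0/' \
  storage/config/mod_account_achievements.conf
# Apply the change
docker compose restart ac-worldserver
```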
### Server Configuration Presets
Apply configuration presets for different server types:
```bash
# In .env, set one of these presets:
SERVER_CONFIG_PRESET=blizzlike # Authentic WotLK experience (1x rates)
SERVER_CONFIG_PRESET=fast-leveling # 3x XP rates, QoL improvements
SERVER_CONFIG_PRESET=hardcore-pvp # Competitive PvP (1.5x rates)
SERVER_CONFIG_PRESET=casual-pve # Relaxed PvE (2x rates)
```
Restart after changing: `docker compose restart ac-worldserver`
## Getting Help
- **Documentation**: See other guides in `docs/`
- **GitHub Issues**: https://github.com/uprightbass360/AzerothCore-RealmMaster/issues
- **AzerothCore Discord**: https://discord.gg/gkt4y2x
## Next Steps
- [Database Management](DATABASE_MANAGEMENT.md) - Backups, restores, migrations
- [Getting Started Guide](GETTING_STARTED.md) - Detailed walkthrough
- [Troubleshooting](TROUBLESHOOTING.md) - Common issues and solutions
- [Module Catalog](MODULES.md) - Complete list of available modules

docs/RELEASES.md Normal file
@@ -0,0 +1,213 @@
# Release Strategy
This document explains how AzerothCore RealmMaster releases work and what they contain.
## Release Philosophy
Since **Docker images are stored on Docker Hub**, GitHub releases serve as **deployment packages** rather than source distributions. Each release contains everything users need to deploy pre-built images without building from source.
## What's in a Release?
### Release Assets (ZIP Archive)
Each release includes a downloadable `.zip` file containing:
```
azerothcore-realmmaster-v1.0.0-realmmaster.zip
├── .env.prebuilt # Pre-configured for Docker Hub images
├── docker-compose.yml # Service definitions
├── deploy.sh # Deployment script
├── status.sh # Status monitoring
├── cleanup.sh # Cleanup utilities
├── scripts/ # Required Python/Bash scripts
├── config/ # Module manifest and presets
├── docs/ # Complete documentation
├── QUICKSTART.md # Release-specific quick start
└── README.md # Project overview
```
### Release Notes
Each release includes:
- Module profile and count
- Docker Hub image tags (date-specific and latest)
- Quick start instructions
- Complete module list
- Build information (commit, date, source variant)
- Links to documentation
- Known issues
## Release Types
### 1. Profile-Based Releases
Each module profile gets its own release variant:
- **v1.0.0-realmmaster** - RealmMaster profile (32 modules, recommended)
- **v1.0.0-suggested-modules** - Alternative suggested module set
- **v1.0.0-all-modules** - All supported modules
- **v1.0.0-playerbots-only** - Just playerbots
Users choose the release that matches their desired module set.
### 2. Version Numbering
We use semantic versioning:
- **Major** (v1.0.0 → v2.0.0): Breaking changes, major feature additions
- **Minor** (v1.0.0 → v1.1.0): New modules, feature enhancements
- **Patch** (v1.0.0 → v1.0.1): Bug fixes, documentation updates
## Docker Hub Image Tags
Releases reference specific Docker Hub tags:
### Date-Tagged Images (Recommended for Production)
```
uprightbass360/azerothcore-realmmaster:authserver-realmmaster-20260109
uprightbass360/azerothcore-realmmaster:worldserver-realmmaster-20260109
```
- **Immutable**: Never change
- **Stable**: Guaranteed to match the release
- **Recommended**: For production deployments
### Latest Tags (Auto-Updated)
```
uprightbass360/azerothcore-realmmaster:authserver-realmmaster-latest
uprightbass360/azerothcore-realmmaster:worldserver-realmmaster-latest
```
- **Mutable**: Updated nightly by CI/CD
- **Convenient**: Always get the newest build
- **Use case**: Development, testing, staying current
## Creating a Release
### Automated (Recommended)
Use the GitHub Actions workflow:
1. Go to **Actions** → **Create Release**
2. Click **Run workflow**
3. Fill in:
- **Version**: `v1.0.0`
- **Profile**: `RealmMaster` (or other profile)
- **Pre-release**: Check if beta/RC
4. Click **Run workflow**
The workflow automatically:
- Creates deployment package with all files
- Generates release notes with module list
- Uploads ZIP archive as release asset
- Creates GitHub release with proper tags
### Manual
If you need to create a release manually:
```bash
# 1. Tag the release
git tag -a v1.0.0 -m "Release v1.0.0 - RealmMaster Profile"
git push origin v1.0.0
# 2. Create deployment package
./scripts/create-release-package.sh v1.0.0 RealmMaster
# 3. Create GitHub release
# Go to GitHub → Releases → Draft a new release
# - Tag: v1.0.0
# - Title: RealmMaster v1.0.0 - RealmMaster Profile
# - Upload: azerothcore-realmmaster-v1.0.0-realmmaster.zip
# - Add release notes
```
## Release Checklist
Before creating a release:
- [ ] Verify CI/CD build succeeded
- [ ] Test Docker Hub images work correctly
- [ ] Update CHANGELOG.md
- [ ] Update version in documentation if needed
- [ ] Verify all module SQL migrations are included
- [ ] Test deployment on clean system
- [ ] Update known issues section
## For Users: Using a Release
### Quick Start
```bash
# 1. Download release
wget https://github.com/uprightbass360/AzerothCore-RealmMaster/releases/download/v1.0.0/azerothcore-realmmaster-v1.0.0-realmmaster.zip
# 2. Extract
unzip azerothcore-realmmaster-v1.0.0-realmmaster.zip
cd azerothcore-realmmaster-v1.0.0-realmmaster
# 3. Configure
nano .env.prebuilt
# Set: DOCKERHUB_USERNAME=uprightbass360
# 4. Deploy
mv .env.prebuilt .env
./deploy.sh
```
### Upgrading Between Releases
```bash
# 1. Backup your data
./scripts/bash/backup.sh
# 2. Download new release
wget https://github.com/.../releases/download/v1.1.0/...
# 3. Extract to new directory
unzip azerothcore-realmmaster-v1.1.0-realmmaster.zip
# 4. Copy your .env and data
cp old-version/.env new-version/.env
cp -r old-version/storage new-version/storage
# 5. Deploy new version
cd new-version
./deploy.sh
```
## Release Schedule
- **Nightly Builds**: Images built automatically at 2 AM UTC
- **Releases**: Created as needed when significant changes accumulate
- **LTS Releases**: Planned quarterly for long-term support
## Support
- **Release Issues**: https://github.com/uprightbass360/AzerothCore-RealmMaster/issues
- **Documentation**: Included in each release ZIP
- **Discord**: https://discord.gg/gkt4y2x
## FAQ
### Why are images on Docker Hub and not in releases?
Docker images can be 1-2GB each. GitHub has a 2GB file limit, and releases should stay lightweight. Docker Hub is designed for hosting images, while GitHub releases are better suited to deployment packages.
### Can I use latest tags in production?
We recommend **date-tagged images** for production (e.g., `authserver-realmmaster-20260109`). Latest tags are updated nightly and may have untested changes.
### How do I know which image version a release uses?
Check the release notes - they include the specific Docker Hub tags (date-stamped) that were tested with that release.
### What if I want to build from source instead?
Clone the repository and use `./setup.sh` + `./build.sh` instead of using pre-built releases. See [GETTING_STARTED.md](GETTING_STARTED.md) for instructions.
### Are releases required?
No! You can:
1. **Use releases**: Download ZIP, deploy pre-built images (easiest)
2. **Use nightly images**: Pull latest tags directly from Docker Hub
3. **Build from source**: Clone repo, build locally (most flexible)
Releases are just convenient snapshots for users who want stability.

@@ -96,7 +96,7 @@ Comprehensive cleanup with multiple destruction levels and safety checks.
Starts all configured containers using appropriate profiles.
#### `scripts/bash/stop-containers.sh` - Graceful Shutdown
Stops all containers with proper cleanup and data protection. The MySQL container performs a shutdown-time sync from tmpfs to persistent storage.
#### `status.sh` - Service Health Monitoring
```bash
@@ -140,6 +140,147 @@ Restores user accounts and characters from backup while preserving world data.
- `acore_characters.sql[.gz]` - Character data (required)
- `acore_world.sql[.gz]` - World data (optional)
#### `scripts/bash/pdump-import.sh` - Character Import
Imports individual character dump files into the database.
```bash
# Import character from pdump file
./scripts/bash/pdump-import.sh --file character.pdump --account testuser --password azerothcore123
# Import with character rename
./scripts/bash/pdump-import.sh --file oldchar.pdump --account newuser --name "NewName" --password azerothcore123
# Validate pdump without importing (dry run)
./scripts/bash/pdump-import.sh --file character.pdump --account testuser --password azerothcore123 --dry-run
```
**Features:**
- Automatic GUID assignment or manual override with `--guid`
- Character renaming during import with `--name`
- Account validation and character name uniqueness checks
- Automatic database backup before import
- Safe server restart handling
#### `scripts/bash/import-pdumps.sh` - Batch Character Import
Processes multiple character dump files from the `import/pdumps/` directory.
```bash
# Import all pdumps with environment settings
./scripts/bash/import-pdumps.sh --password azerothcore123 --account defaultuser
# Non-interactive batch import
./scripts/bash/import-pdumps.sh --password azerothcore123 --non-interactive
```
**Directory Structure:**
```
import/pdumps/
├── character1.pdump # Character dump files
├── character2.sql # SQL dump files also supported
├── configs/ # Optional per-character configuration
│ ├── character1.conf # account=user1, name=NewName
│ └── character2.conf # account=user2, guid=5000
└── processed/ # Successfully imported files moved here
```
**Configuration Format (`.conf`):**
```ini
account=target_account_name_or_id
name=new_character_name # Optional: rename character
guid=force_specific_guid # Optional: force GUID
```
### Security Management Scripts
#### `scripts/bash/bulk-2fa-setup.sh` - Bulk 2FA Setup
Configures TOTP 2FA for multiple AzerothCore accounts using official SOAP API.
```bash
# Setup 2FA for all accounts without it
./scripts/bash/bulk-2fa-setup.sh --all
# Setup for specific accounts
./scripts/bash/bulk-2fa-setup.sh --account user1 --account user2
# Force regenerate with custom issuer
./scripts/bash/bulk-2fa-setup.sh --all --force --issuer "MyServer"
# Preview what would be done
./scripts/bash/bulk-2fa-setup.sh --all --dry-run
# Use custom SOAP credentials
./scripts/bash/bulk-2fa-setup.sh --all --soap-user admin --soap-pass adminpass
# Show help / options
./scripts/bash/bulk-2fa-setup.sh --help
```
**Features:**
- **Official AzerothCore API Integration**: Uses SOAP commands instead of direct database manipulation
- Generates AzerothCore-compatible 16-character Base32 TOTP secrets (longer secrets are rejected by SOAP)
- Automatic account discovery or specific targeting
- QR code generation for authenticator apps
- Force regeneration of existing 2FA secrets
- Comprehensive output with setup instructions
- Safe dry-run mode for testing
- SOAP connectivity validation
- Proper error handling and validation
**Requirements:**
- AzerothCore worldserver with SOAP enabled (SOAP.Enabled = 1)
- SOAP port exposed on 7778 (SOAP.Port = 7878, mapped to external 7778)
- Remote Access enabled (Ra.Enable = 1) in worldserver.conf
- SOAP.IP = "0.0.0.0" for external connectivity
- GM account with sufficient privileges (gmlevel 3)
- Provide SOAP credentials explicitly via `--soap-user` and `--soap-pass` (these are required; no env fallback)
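Before running the script, a quick sanity check of those settings and of SOAP reachability can save a failed run (the config path is assumed to be under `storage/config/`; replace `GM_USER:GM_PASS` with your GM account credentials):
```bash
# Check the relevant worldserver.conf keys (path assumed)
grep -E '^(SOAP\.Enabled|SOAP\.Port|SOAP\.IP|Ra\.Enable)' storage/config/worldserver.conf
# Confirm the SOAP endpoint answers on the externally mapped port
curl -s --user "GM_USER:GM_PASS" -H "Content-Type: text/xml" \
  -d '<SOAP-ENV:Envelope xmlns:SOAP-ENV="http://schemas.xmlsoap.org/soap/envelope/" xmlns:ns1="urn:AC"><SOAP-ENV:Body><ns1:executeCommand><command>server info</command></ns1:executeCommand></SOAP-ENV:Body></SOAP-ENV:Envelope>' \
  http://localhost:7778/
```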
**Output Structure:**
```
./2fa-setup-TIMESTAMP/
├── qr-codes/ # QR code images for each account
├── setup-report.txt # Complete setup summary
├── console-commands.txt # Manual verification commands
└── secrets-backup.csv # Secure backup of all secrets
```
**Security Notes:**
- Generated QR codes and backup files contain sensitive TOTP secrets
- Distribute QR codes securely to users
- Delete or encrypt backup files after distribution
- TOTP secrets are also stored in the AzerothCore database
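One way to handle that last step once the QR codes have been distributed (standard tooling, shown only as an example):
```bash
# Encrypt the plaintext secrets backup, then remove the original
gpg --symmetric --cipher-algo AES256 2fa-setup-*/secrets-backup.csv   # writes secrets-backup.csv.gpg
shred -u 2fa-setup-*/secrets-backup.csv
```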
#### `scripts/bash/generate-2fa-qr.sh` / `generate-2fa-qr.py` - Individual 2FA Setup
Generate QR codes for individual account 2FA setup.
> Tip: each script supports `-h/--help` to see all options.
```bash
# Generate QR code for single account
./scripts/bash/generate-2fa-qr.sh -u username
# Use custom issuer and output path
./scripts/bash/generate-2fa-qr.sh -u username -i "MyServer" -o /tmp/qr.png
# Use existing secret
./scripts/bash/generate-2fa-qr.sh -u username -s JBSWY3DPEHPK3PXP
# Show help / options
./scripts/bash/generate-2fa-qr.sh -h
```
> AzerothCore's SOAP endpoint only accepts 16-character Base32 secrets (A-Z and 2-7). The generators enforce this length to avoid "The provided two-factor authentication secret is not valid" errors.
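If you ever need a compliant secret by hand, the same recipe the scripts rely on works from any shell with `openssl` and `base32` available:
```bash
# Generate a 16-character Base32 secret (A-Z, 2-7 only)
openssl rand 10 | base32 -w0 | head -c16; echo
```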
#### `scripts/bash/test-2fa-token.py` - Generate TOTP Test Codes
Quickly verify a 16-character Base32 secret produces valid 6-digit codes.
```bash
# Show help
./scripts/bash/test-2fa-token.py --help
# Generate two consecutive codes for a secret
./scripts/bash/test-2fa-token.py -s JBSWY3DPEHPK3PXP -c 2
```
### Module Management Scripts
#### `scripts/bash/stage-modules.sh` - Module Staging
@@ -274,6 +415,51 @@ Comprehensive deployment verification with health checks and service validation.
./scripts/bash/verify-deployment.sh --quick # Quick health check only
```
#### `scripts/bash/validate-env.sh` - Environment Configuration Validator
Validates `.env` configuration for required and optional variables with detailed reporting.
```bash
./scripts/bash/validate-env.sh # Basic validation (required vars only)
./scripts/bash/validate-env.sh --strict # Validate required + optional vars
./scripts/bash/validate-env.sh --quiet # Errors only, suppress success messages
```
**Exit Codes:**
- `0` - All required variables present (and optional if --strict)
- `1` - Missing required variables
- `2` - Missing optional variables (only in --strict mode)
**Validates:**
- **Project Configuration:** `COMPOSE_PROJECT_NAME`, `NETWORK_NAME`
- **Repository URLs:** Standard and playerbots AzerothCore repositories
- **Storage Paths:** `STORAGE_PATH`, `STORAGE_PATH_LOCAL`, `MODULES_REBUILD_SOURCE_PATH`
- **Database Settings:** MySQL credentials, ports, database names
- **Container Config:** Container names and user permissions
- **Build Paths:** Module rebuild source paths (optional)
- **Performance Tuning:** MySQL buffer pool, InnoDB settings (optional)
- **Image References:** Docker image tags (optional)
**Use Cases:**
- Pre-deployment validation
- Troubleshooting configuration issues
- CI/CD pipeline checks
- Documentation of environment requirements
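For instance, a CI pipeline step could gate deployment on a strict validation pass:
```bash
# Abort the pipeline if required (and, with --strict, optional) variables are missing
./scripts/bash/validate-env.sh --strict --quiet || {
  echo "Environment validation failed - aborting deployment"
  exit 1
}
```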
**Example Output:**
```
Validating environment configuration...
✅ Loaded environment from /path/to/.env
Checking required variables...
✅ COMPOSE_PROJECT_NAME=azerothcore-realmmaster
✅ NETWORK_NAME=azerothcore
✅ STORAGE_PATH=./storage
✅ MYSQL_ROOT_PASSWORD=********
✅ All required variables are set
✅ Environment validation passed ✨
```
### Backup System Scripts
#### `scripts/bash/backup-scheduler.sh` - Automated Backup Service

@@ -75,7 +75,7 @@ services:
| Upstream Concept | RealmMaster Equivalent | Notes |
| ---------------- | ---------------------- | ----- |
| MySQL container with bind-mounted storage | `ac-mysql` + `ac-storage-init` | Bind mounts live under `storage/` and `local-storage/`; tmpfs keeps runtime data fast and is synced to disk on graceful shutdown. |
| Manual DB import container | `ac-db-import` & `ac-db-init` | Automatically imports schemas or restores from backups; disable by skipping the `db` profile if you truly want manual control. |
| World/Auth servers with optional DBC overrides | `ac-authserver-*` / `ac-worldserver-*` | Profile-based builds cover vanilla, playerbots, and custom module binaries. DBC overrides go into the shared client data mount just like upstream. |
| Client data bind mounts | `ac-client-data-standard` (or `-playerbots`) | Runs `scripts/bash/download-client-data.sh`, caches releases, and mounts them read-only into the worldserver. |

@@ -7,7 +7,8 @@ This directory allows you to easily import custom database files and configurati
```
import/
├── db/      # Database SQL files to import
├── conf/    # Configuration file overrides
└── pdumps/  # Character dump files to import
```
## 🗄️ Database Import (`import/db/`)
@@ -93,6 +94,31 @@ AiPlayerbot.MaxRandomBots = 200
See `config/CONFIG_MANAGEMENT.md` for detailed preset documentation.
## 🎮 Character Import (`import/pdumps/`)
Import character dump files from other AzerothCore servers.
### Supported Formats
- **`.pdump`** - Character dump files from `.pdump write` command
- **`.sql`** - SQL character dump files
### Quick Start
1. Place character dump files in `import/pdumps/`
2. Run the import script:
```bash
./scripts/bash/import-pdumps.sh --password your_mysql_password --account target_account
```
### Advanced Configuration
Create `import/pdumps/configs/filename.conf` for per-character settings:
```ini
account=target_account
name=NewCharacterName # Optional: rename
guid=5000 # Optional: force GUID
```
**📖 For complete character import documentation, see [import/pdumps/README.md](pdumps/README.md)**
## 🔄 Automated Import
Both database and configuration imports are automatically handled during:
@@ -118,6 +144,7 @@ Both database and configuration imports are automatically handled during:
## 📚 Related Documentation
- [Character Import Guide](pdumps/README.md) - Complete pdump import documentation
- [Database Management](../docs/DATABASE_MANAGEMENT.md)
- [Configuration Management](../config/CONFIG_MANAGEMENT.md)
- [Module Management](../docs/ADVANCED.md#module-management)

import/pdumps/README.md Normal file
@@ -0,0 +1,192 @@
# Character PDump Import
This directory allows you to easily import character pdump files into your AzerothCore server.
## 📁 Directory Structure
```
import/pdumps/
├── README.md # This file
├── *.pdump # Place your character dump files here
├── *.sql # SQL dump files also supported
├── configs/ # Optional per-file configuration
│ ├── character1.conf
│ └── character2.conf
├── examples/ # Example files and configurations
└── processed/ # Successfully imported files are moved here
```
## 🎮 Character Dump Import
### Quick Start
1. **Place your pdump files** in this directory:
```bash
cp /path/to/mycharacter.pdump import/pdumps/
```
2. **Run the import script**:
```bash
./scripts/bash/import-pdumps.sh --password your_mysql_password --account target_account
```
3. **Login and play** - your characters are now available!
### Supported File Formats
- **`.pdump`** - Character dump files from AzerothCore `.pdump write` command
- **`.sql`** - SQL character dump files
### Configuration Options
#### Environment Variables (`.env`)
```bash
# Set default account for all imports
DEFAULT_IMPORT_ACCOUNT=testuser
# Database credentials (usually already set)
MYSQL_ROOT_PASSWORD=your_mysql_password
ACORE_DB_AUTH_NAME=acore_auth
ACORE_DB_CHARACTERS_NAME=acore_characters
```
#### Per-Character Configuration (`configs/filename.conf`)
Create a `.conf` file with the same name as your pdump file to specify custom import options:
**Example: `configs/mycharacter.conf`**
```ini
# Target account (required if not set globally)
account=testuser
# Rename character during import (optional)
name=NewCharacterName
# Force specific GUID (optional, auto-assigned if not specified)
guid=5000
```
### Command Line Usage
#### Import All Files
```bash
# Use environment settings
./scripts/bash/import-pdumps.sh
# Override settings
./scripts/bash/import-pdumps.sh --password mypass --account testuser
```
#### Import Single File
```bash
# Direct import with pdump-import.sh
./scripts/bash/pdump-import.sh --file character.pdump --account testuser --password mypass
# With character rename
./scripts/bash/pdump-import.sh --file oldchar.pdump --account newuser --name "NewName" --password mypass
# Validate before import (dry run)
./scripts/bash/pdump-import.sh --file character.pdump --account testuser --password mypass --dry-run
```
## 🛠️ Advanced Features
### Account Management
- **Account Validation**: Scripts automatically verify that target accounts exist
- **Account ID or Name**: You can use either account names or numeric IDs
- **Interactive Mode**: If no account is specified, you'll be prompted to enter one
### GUID Handling
- **Auto-Assignment**: Next available GUID is automatically assigned
- **Force GUID**: Use `--guid` parameter or config file to force specific GUID
- **Conflict Detection**: Import fails safely if GUID already exists
### Character Names
- **Validation**: Character names must follow WoW naming rules (2-12 letters)
- **Uniqueness**: Import fails if character name already exists on server
- **Renaming**: Use `--name` parameter or config file to rename during import
### Safety Features
- **Automatic Backup**: Characters database is backed up before each import
- **Server Management**: World server is safely stopped/restarted during import
- **Rollback Ready**: Backups are stored in `manual-backups/` directory
- **Dry Run**: Validate imports without actually importing
## 📋 Import Workflow
1. **Validation Phase**
- Check file format and readability
- Validate target account exists
- Verify character name availability (if specified)
- Check GUID conflicts
2. **Pre-Import Phase**
- Create automatic database backup
- Stop world server for safe import
3. **Processing Phase**
- Process SQL file (update account references, GUID, name)
- Import character data into database
4. **Post-Import Phase**
- Restart world server
- Verify import success
- Move processed files to `processed/` directory
## 🚨 Important Notes
### Before You Import
- **Backup Your Database**: Always backup before importing characters
- **Account Required**: Target account must exist in your auth database
- **Unique Names**: Character names must be unique across the entire server
- **Server Downtime**: World server is briefly restarted during import
### PDump Limitations
The AzerothCore pdump system has some known limitations:
- **Guild Data**: Guild information is not included in pdump files
- **Module Data**: Some module-specific data (transmog, reagent bank) may not transfer
- **Version Compatibility**: Pdump files from different database versions may have issues
### Troubleshooting
- **"Account not found"**: Verify account exists in auth database
- **"Character name exists"**: Use `--name` to rename or choose different name
- **"GUID conflicts"**: Use `--guid` to force different GUID or let system auto-assign
- **"Database errors"**: Check that pdump file is compatible with your database version
## 📚 Examples
### Basic Import
```bash
# Place file and import
cp character.pdump import/pdumps/
./scripts/bash/import-pdumps.sh --password mypass --account testuser
```
### Batch Import with Configuration
```bash
# Set up multiple characters
cp char1.pdump import/pdumps/
cp char2.pdump import/pdumps/
# Configure individual characters
echo "account=user1" > import/pdumps/configs/char1.conf
echo "account=user2
name=RenamedChar" > import/pdumps/configs/char2.conf
# Import all
./scripts/bash/import-pdumps.sh --password mypass
```
### Single Character Import
```bash
./scripts/bash/pdump-import.sh \
--file character.pdump \
--account testuser \
--name "MyNewCharacter" \
--password mypass
```
## 🔗 Related Documentation
- [Database Management](../../docs/DATABASE_MANAGEMENT.md)
- [Backup System](../../docs/TROUBLESHOOTING.md#backup-system)
- [Getting Started Guide](../../docs/GETTING_STARTED.md)

@@ -0,0 +1,43 @@
#!/bin/bash
# Example batch import script
# This shows how to import multiple characters with different configurations
set -euo pipefail
MYSQL_PASSWORD="your_mysql_password_here"
echo "Setting up character import batch..."
# Create character-specific configurations
mkdir -p ../configs
# Character 1: Import to specific account
cat > ../configs/warrior.conf <<EOF
account=player1
EOF
# Character 2: Import with rename
cat > ../configs/mage.conf <<EOF
account=player2
name=NewMageName
EOF
# Character 3: Import with forced GUID
cat > ../configs/priest.conf <<EOF
account=player3
name=HolyPriest
guid=5000
EOF
echo "Configuration files created!"
echo ""
echo "Now place your pdump files:"
echo " warrior.pdump -> ../warrior.pdump"
echo " mage.pdump -> ../mage.pdump"
echo " priest.pdump -> ../priest.pdump"
echo ""
echo "Then run the import:"
echo " ../../../scripts/bash/import-pdumps.sh --password $MYSQL_PASSWORD"
echo ""
echo "Or import individually:"
echo " ../../../scripts/bash/pdump-import.sh --file ../warrior.pdump --account player1 --password $MYSQL_PASSWORD"

View File

@@ -0,0 +1,20 @@
# Example character import configuration
# Copy this file to configs/yourcharacter.conf and modify as needed
# Target account (required if DEFAULT_IMPORT_ACCOUNT is not set)
# Can be account name or account ID
account=testuser
# Rename character during import (optional)
# Must follow WoW naming rules: 2-12 letters, no numbers/special chars
name=NewCharacterName
# Force specific character GUID (optional)
# If not specified, next available GUID will be used automatically
# guid=5000
# Additional notes:
# - Account must exist in auth database before import
# - Character names must be unique across the server
# - GUID conflicts will cause import to fail
# - Use dry-run mode to test before actual import

584
scripts/bash/bulk-2fa-setup.sh Executable file
View File

@@ -0,0 +1,584 @@
#!/bin/bash
#
# AzerothCore Bulk 2FA Setup Script
# Generates and configures TOTP 2FA for multiple accounts
#
# Usage: ./scripts/bash/bulk-2fa-setup.sh [OPTIONS]
#
set -e
# Script directory for relative imports
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# Source common utilities
source "$SCRIPT_DIR/lib/common.sh"
# Set environment paths
ENV_PATH="${ENV_PATH:-$PROJECT_ROOT/.env}"
DEFAULT_ENV_PATH="$PROJECT_ROOT/.env"
# =============================================================================
# GLOBAL VARIABLES
# =============================================================================
# Command line options
OPT_ALL=false
OPT_ACCOUNTS=()
OPT_FORCE=false
OPT_OUTPUT_DIR=""
OPT_DRY_RUN=false
OPT_ISSUER="AzerothCore"
OPT_FORMAT="qr"
# Container and database settings
WORLDSERVER_CONTAINER="ac-worldserver"
DATABASE_CONTAINER="ac-mysql"
MYSQL_PASSWORD=""
# SOAP settings for official AzerothCore API
SOAP_HOST="localhost"
SOAP_PORT="7778"
SOAP_USERNAME=""
SOAP_PASSWORD=""
# Output paths
OUTPUT_BASE_DIR=""
QR_CODES_DIR=""
SETUP_REPORT=""
CONSOLE_COMMANDS=""
SECRETS_BACKUP=""
# =============================================================================
# USAGE AND HELP
# =============================================================================
show_usage() {
echo "Usage: $0 [OPTIONS]"
echo ""
echo "Bulk 2FA setup for AzerothCore accounts using official SOAP API"
echo ""
echo "Options:"
echo " --all Process all non-bot accounts without 2FA"
echo " --account USERNAME Process specific account (can be repeated)"
echo " --force Regenerate 2FA even if already exists"
echo " --output-dir PATH Custom output directory"
echo " --dry-run Show what would be done without executing"
echo " --issuer NAME Issuer name for TOTP (default: AzerothCore)"
echo " --format [qr|manual] Output QR codes or manual setup info"
echo " --soap-user USERNAME SOAP API username (required)"
echo " --soap-pass PASSWORD SOAP API password (required)"
echo " -h, --help Show this help message"
echo ""
echo "Examples:"
echo " $0 --all # Setup 2FA for all accounts"
echo " $0 --account user1 --account user2 # Setup for specific accounts"
echo " $0 --all --force --issuer MyServer # Force regenerate with custom issuer"
echo " $0 --all --dry-run # Preview what would be done"
echo ""
echo "Requirements:"
echo " - AzerothCore worldserver with SOAP enabled on port 7778"
echo " - GM account with sufficient privileges for SOAP access"
echo " - Remote Access (Ra.Enable = 1) enabled in worldserver.conf"
}
# =============================================================================
# UTILITY FUNCTIONS
# =============================================================================
# Check if required containers are running and healthy
check_containers() {
info "Checking container status..."
# Check worldserver container
if ! docker ps --format '{{.Names}}' | grep -q "^${WORLDSERVER_CONTAINER}$"; then
fatal "Container $WORLDSERVER_CONTAINER is not running"
fi
# Check if database container exists
if ! docker ps --format '{{.Names}}' | grep -q "^${DATABASE_CONTAINER}$"; then
fatal "Container $DATABASE_CONTAINER is not running"
fi
# Test database connectivity
if ! docker exec "$WORLDSERVER_CONTAINER" mysql -h "$DATABASE_CONTAINER" -u root -p"$MYSQL_PASSWORD" acore_auth -e "SELECT 1;" &>/dev/null; then
fatal "Cannot connect to AzerothCore database"
fi
# Test SOAP connectivity (only if credentials are available)
if [ -n "$SOAP_USERNAME" ] && [ -n "$SOAP_PASSWORD" ]; then
info "Testing SOAP API connectivity..."
if ! soap_result=$(soap_execute_command "server info"); then
fatal "Cannot connect to SOAP API: $soap_result"
fi
ok "SOAP API is accessible"
fi
ok "Containers are healthy and accessible"
}
# Execute MySQL query via container
mysql_query() {
local query="$1"
local database="${2:-acore_auth}"
docker exec "$WORLDSERVER_CONTAINER" mysql \
-h "$DATABASE_CONTAINER" \
-u root \
-p"$MYSQL_PASSWORD" \
"$database" \
-e "$query" \
2>/dev/null
}
# Execute SOAP command via AzerothCore official API
soap_execute_command() {
local command="$1"
local response
# Construct SOAP XML request
local soap_request='<?xml version="1.0" encoding="UTF-8"?>
<SOAP-ENV:Envelope
xmlns:SOAP-ENV="http://schemas.xmlsoap.org/soap/envelope/"
xmlns:SOAP-ENC="http://schemas.xmlsoap.org/soap/encoding/"
xmlns:xsi="http://www.w3.org/1999/XMLSchema-instance"
xmlns:xsd="http://www.w3.org/1999/XMLSchema"
xmlns:ns1="urn:AC">
<SOAP-ENV:Body>
<ns1:executeCommand>
<command>'"$command"'</command>
</ns1:executeCommand>
</SOAP-ENV:Body>
</SOAP-ENV:Envelope>'
# Execute SOAP request
response=$(curl -s -X POST \
-H "Content-Type: text/xml" \
--user "$SOAP_USERNAME:$SOAP_PASSWORD" \
-d "$soap_request" \
"http://$SOAP_HOST:$SOAP_PORT/" 2>/dev/null)
# Flatten response for reliable parsing
local flat_response
flat_response=$(echo "$response" | tr -d '\n' | sed 's/\r//g')
# Check if response contains fault
if echo "$flat_response" | grep -q "SOAP-ENV:Fault"; then
# Extract fault string for error reporting
echo "$flat_response" | sed -n 's/.*<faultstring>\(.*\)<\/faultstring>.*/\1/p' | sed 's/&#xD;//g'
return 1
fi
# Extract successful result
echo "$flat_response" | sed -n 's/.*<result>\(.*\)<\/result>.*/\1/p' | sed 's/&#xD;//g'
return 0
}
# Generate Base32 TOTP secret
generate_totp_secret() {
# Use existing generation logic from generate-2fa-qr.sh
if command -v base32 >/dev/null 2>&1; then
openssl rand 10 | base32 -w0 | head -c16
else
# Fallback using Python
python3 -c "
import base64
import os
secret_bytes = os.urandom(10)
secret_b32 = base64.b32encode(secret_bytes).decode('ascii').rstrip('=')
print(secret_b32[:16])
"
fi
}
# Validate Base32 secret format
validate_base32_secret() {
local secret="$1"
if [[ ! "$secret" =~ ^[A-Z2-7]+$ ]]; then
return 1
fi
if [ ${#secret} -ne 16 ]; then
err "AzerothCore SOAP requires a 16-character Base32 secret (got ${#secret})"
return 1
fi
return 0
}
# =============================================================================
# ACCOUNT DISCOVERY FUNCTIONS
# =============================================================================
# Get all accounts that need 2FA setup
get_accounts_needing_2fa() {
local force="$1"
local query
if [ "$force" = "true" ]; then
# Include accounts that already have 2FA when force is enabled
query="SELECT username FROM account
WHERE username NOT LIKE 'rndbot%'
AND username NOT LIKE 'playerbot%'
ORDER BY username;"
else
# Only accounts without 2FA
query="SELECT username FROM account
WHERE (totp_secret IS NULL OR totp_secret = '')
AND username NOT LIKE 'rndbot%'
AND username NOT LIKE 'playerbot%'
ORDER BY username;"
fi
mysql_query "$query" | tail -n +2 # Remove header row
}
# Check if specific account exists
account_exists() {
local username="$1"
local result
result=$(mysql_query "SELECT COUNT(*) FROM account WHERE username = '$username';" | tail -n +2)
[ "$result" -eq 1 ]
}
# Check if account already has 2FA
account_has_2fa() {
local username="$1"
local result
result=$(mysql_query "SELECT COUNT(*) FROM account WHERE username = '$username' AND totp_secret IS NOT NULL AND totp_secret != '';" | tail -n +2)
[ "$result" -eq 1 ]
}
# =============================================================================
# 2FA SETUP FUNCTIONS
# =============================================================================
# Generate and set up 2FA for a single account
setup_2fa_for_account() {
local username="$1"
local force="$2"
local secret=""
local qr_output=""
info "Processing account: $username"
# Check if account exists
if ! account_exists "$username"; then
err "Account '$username' does not exist, skipping"
return 1
fi
# Check if account already has 2FA
if account_has_2fa "$username" && [ "$force" != "true" ]; then
warn "Account '$username' already has 2FA configured, use --force to regenerate"
return 0
fi
# Generate TOTP secret
secret=$(generate_totp_secret)
if [ -z "$secret" ] || ! validate_base32_secret "$secret"; then
err "Failed to generate valid TOTP secret for $username"
return 1
fi
if [ "$OPT_DRY_RUN" = "true" ]; then
log "DRY RUN: Would set 2FA secret for $username: $secret"
return 0
fi
# Set 2FA using official AzerothCore SOAP API
local soap_result
if ! soap_result=$(soap_execute_command ".account set 2fa $username $secret"); then
err "Failed to set 2FA for $username via SOAP API: $soap_result"
return 1
fi
# Verify success message
if ! echo "$soap_result" | grep -q "Successfully enabled two-factor authentication"; then
err "Unexpected SOAP response for $username: $soap_result"
return 1
fi
# Generate QR code if format is 'qr'
if [ "$OPT_FORMAT" = "qr" ]; then
qr_output="$QR_CODES_DIR/${username}_2fa_qr.png"
if ! "$SCRIPT_DIR/generate-2fa-qr.sh" -u "$username" -s "$secret" -i "$OPT_ISSUER" -o "$qr_output" >/dev/null; then
warn "Failed to generate QR code for $username, but secret was saved"
fi
fi
# Log setup information
echo "$username,$secret,$(date -u +"%Y-%m-%d %H:%M:%S UTC")" >> "$SECRETS_BACKUP"
echo "account set 2fa $username $secret" >> "$CONSOLE_COMMANDS"
ok "2FA configured for account: $username"
return 0
}
# =============================================================================
# OUTPUT AND REPORTING FUNCTIONS
# =============================================================================
# Create output directory structure
create_output_structure() {
local timestamp
timestamp=$(date +"%Y%m%d%H%M%S")
if [ -n "$OPT_OUTPUT_DIR" ]; then
OUTPUT_BASE_DIR="$OPT_OUTPUT_DIR"
else
OUTPUT_BASE_DIR="$PROJECT_ROOT/2fa-setup-$timestamp"
fi
# Create directories
mkdir -p "$OUTPUT_BASE_DIR"
QR_CODES_DIR="$OUTPUT_BASE_DIR/qr-codes"
mkdir -p "$QR_CODES_DIR"
# Set up output files
SETUP_REPORT="$OUTPUT_BASE_DIR/setup-report.txt"
CONSOLE_COMMANDS="$OUTPUT_BASE_DIR/console-commands.txt"
SECRETS_BACKUP="$OUTPUT_BASE_DIR/secrets-backup.csv"
# Initialize files
echo "# AzerothCore 2FA Console Commands" > "$CONSOLE_COMMANDS"
echo "# Generated on $(date)" >> "$CONSOLE_COMMANDS"
echo "" >> "$CONSOLE_COMMANDS"
echo "username,secret,generated_date" > "$SECRETS_BACKUP"
info "Output directory: $OUTPUT_BASE_DIR"
}
# Generate final setup report
generate_setup_report() {
local total_processed="$1"
local successful="$2"
local failed="$3"
{
echo "AzerothCore Bulk 2FA Setup Report"
echo "================================="
echo ""
echo "Generated: $(date)"
echo "Command: $0 $*"
echo ""
echo "Summary:"
echo "--------"
echo "Total accounts processed: $total_processed"
echo "Successfully configured: $successful"
echo "Failed: $failed"
echo ""
echo "Output Files:"
echo "-------------"
echo "- QR Codes: $QR_CODES_DIR/"
echo "- Console Commands: $CONSOLE_COMMANDS"
echo "- Secrets Backup: $SECRETS_BACKUP"
echo ""
echo "Next Steps:"
echo "-----------"
echo "1. Distribute QR codes to users securely"
echo "2. Users scan QR codes with authenticator apps"
echo "3. Verify setup using console commands if needed"
echo "4. Store secrets backup securely and delete when no longer needed"
echo ""
echo "Security Notes:"
echo "--------------"
echo "- QR codes contain sensitive TOTP secrets"
echo "- Secrets backup file contains plaintext secrets"
echo "- Delete or encrypt these files after distribution"
echo "- Secrets are also stored in AzerothCore database"
} > "$SETUP_REPORT"
info "Setup report generated: $SETUP_REPORT"
}
# =============================================================================
# MAIN SCRIPT LOGIC
# =============================================================================
# Parse command line arguments
parse_arguments() {
while [[ $# -gt 0 ]]; do
case $1 in
--all)
OPT_ALL=true
shift
;;
--account)
if [ -z "$2" ]; then
fatal "Option --account requires a username argument"
fi
OPT_ACCOUNTS+=("$2")
shift 2
;;
--force)
OPT_FORCE=true
shift
;;
--output-dir)
if [ -z "$2" ]; then
fatal "Option --output-dir requires a path argument"
fi
OPT_OUTPUT_DIR="$2"
shift 2
;;
--dry-run)
OPT_DRY_RUN=true
shift
;;
--issuer)
if [ -z "$2" ]; then
fatal "Option --issuer requires a name argument"
fi
OPT_ISSUER="$2"
shift 2
;;
--format)
if [ -z "$2" ]; then
fatal "Option --format requires qr or manual"
fi
if [[ "$2" != "qr" && "$2" != "manual" ]]; then
fatal "Format must be 'qr' or 'manual'"
fi
OPT_FORMAT="$2"
shift 2
;;
--soap-user)
if [ -z "$2" ]; then
fatal "Option --soap-user requires a username argument"
fi
SOAP_USERNAME="$2"
shift 2
;;
--soap-pass)
if [ -z "$2" ]; then
fatal "Option --soap-pass requires a password argument"
fi
SOAP_PASSWORD="$2"
shift 2
;;
-h|--help)
show_usage
exit 0
;;
*)
fatal "Unknown option: $1"
;;
esac
done
}
# Main execution function
main() {
local accounts_to_process=()
local total_processed=0
local successful=0
local failed=0
# Show help if no arguments were provided
if [ $# -eq 0 ]; then
show_usage
exit 1
fi
# Parse arguments
parse_arguments "$@"
# Validate options
if [ "$OPT_ALL" = "false" ] && [ ${#OPT_ACCOUNTS[@]} -eq 0 ]; then
fatal "Must specify either --all or --account USERNAME"
fi
if [ "$OPT_ALL" = "true" ] && [ ${#OPT_ACCOUNTS[@]} -gt 0 ]; then
fatal "Cannot use --all with specific --account options"
fi
# Load environment variables
MYSQL_PASSWORD=$(read_env "MYSQL_ROOT_PASSWORD" "")
if [ -z "$MYSQL_PASSWORD" ]; then
fatal "MYSQL_ROOT_PASSWORD not found in environment"
fi
# Require SOAP credentials via CLI flags
if [ -z "$SOAP_USERNAME" ] || [ -z "$SOAP_PASSWORD" ]; then
fatal "SOAP credentials required. Provide --soap-user and --soap-pass."
fi
# Check container health
check_containers
# Create output structure
create_output_structure
# Determine accounts to process
if [ "$OPT_ALL" = "true" ]; then
info "Discovering accounts that need 2FA setup..."
readarray -t accounts_to_process < <(get_accounts_needing_2fa "$OPT_FORCE")
if [ ${#accounts_to_process[@]} -eq 0 ]; then
if [ "$OPT_FORCE" = "true" ]; then
warn "No accounts found in database"
else
ok "All accounts already have 2FA configured"
fi
exit 0
fi
info "Found ${#accounts_to_process[@]} accounts to process"
else
accounts_to_process=("${OPT_ACCOUNTS[@]}")
fi
# Display dry run information
if [ "$OPT_DRY_RUN" = "true" ]; then
warn "DRY RUN MODE - No changes will be made"
info "Would process the following accounts:"
for account in "${accounts_to_process[@]}"; do
echo " - $account"
done
echo ""
fi
# Process each account
info "Processing ${#accounts_to_process[@]} accounts..."
for account in "${accounts_to_process[@]}"; do
total_processed=$((total_processed + 1))
if setup_2fa_for_account "$account" "$OPT_FORCE"; then
successful=$((successful + 1))
else
failed=$((failed + 1))
fi
done
# Generate final report
if [ "$OPT_DRY_RUN" = "false" ]; then
generate_setup_report "$total_processed" "$successful" "$failed"
# Summary
echo ""
ok "Bulk 2FA setup completed"
info "Processed: $total_processed accounts"
info "Successful: $successful"
info "Failed: $failed"
info "Output directory: $OUTPUT_BASE_DIR"
if [ "$failed" -gt 0 ]; then
warn "Some accounts failed to process. Check the output for details."
exit 1
fi
else
info "Dry run completed. Use without --dry-run to execute."
if [ "$failed" -gt 0 ]; then
warn "Some accounts would fail to process."
exit 1
fi
fi
}
# Execute main function with all arguments
main "$@"

@@ -219,10 +219,12 @@ if [ -z "$backup_path" ]; then
echo "📦 Latest daily backup found: $latest_daily" echo "📦 Latest daily backup found: $latest_daily"
for backup_file in "$BACKUP_DIRS/daily/$latest_daily"/*.sql.gz; do for backup_file in "$BACKUP_DIRS/daily/$latest_daily"/*.sql.gz; do
if [ -f "$backup_file" ] && [ -s "$backup_file" ]; then if [ -f "$backup_file" ] && [ -s "$backup_file" ]; then
if timeout 10 zcat "$backup_file" 2>/dev/null | head -20 | grep -q "CREATE DATABASE\|INSERT INTO\|CREATE TABLE"; then if timeout 10 gzip -t "$backup_file" >/dev/null 2>&1; then
echo "✅ Valid daily backup file: $(basename "$backup_file")" echo "✅ Valid daily backup file: $(basename "$backup_file")"
backup_path="$BACKUP_DIRS/daily/$latest_daily" backup_path="$BACKUP_DIRS/daily/$latest_daily"
break 2 break 2
else
echo "⚠️ gzip validation failed for $(basename "$backup_file")"
fi fi
fi fi
done done
@@ -237,10 +239,12 @@ if [ -z "$backup_path" ]; then
echo "📦 Latest hourly backup found: $latest_hourly" echo "📦 Latest hourly backup found: $latest_hourly"
for backup_file in "$BACKUP_DIRS/hourly/$latest_hourly"/*.sql.gz; do for backup_file in "$BACKUP_DIRS/hourly/$latest_hourly"/*.sql.gz; do
if [ -f "$backup_file" ] && [ -s "$backup_file" ]; then if [ -f "$backup_file" ] && [ -s "$backup_file" ]; then
if timeout 10 zcat "$backup_file" >/dev/null 2>&1; then if timeout 10 gzip -t "$backup_file" >/dev/null 2>&1; then
echo "✅ Valid hourly backup file: $(basename "$backup_file")" echo "✅ Valid hourly backup file: $(basename "$backup_file")"
backup_path="$BACKUP_DIRS/hourly/$latest_hourly" backup_path="$BACKUP_DIRS/hourly/$latest_hourly"
break 2 break 2
else
echo "⚠️ gzip validation failed for $(basename "$backup_file")"
fi fi
fi fi
done done
@@ -259,10 +263,12 @@ if [ -z "$backup_path" ]; then
echo "🔍 Validating timestamped backup content..." echo "🔍 Validating timestamped backup content..."
for backup_file in "$BACKUP_DIRS/$latest_timestamped"/*.sql.gz; do for backup_file in "$BACKUP_DIRS/$latest_timestamped"/*.sql.gz; do
if [ -f "$backup_file" ] && [ -s "$backup_file" ]; then if [ -f "$backup_file" ] && [ -s "$backup_file" ]; then
if timeout 10 zcat "$backup_file" >/dev/null 2>&1; then if timeout 10 gzip -t "$backup_file" >/dev/null 2>&1; then
echo "✅ Valid timestamped backup found: $(basename "$backup_file")" echo "✅ Valid timestamped backup found: $(basename "$backup_file")"
backup_path="$BACKUP_DIRS/$latest_timestamped" backup_path="$BACKUP_DIRS/$latest_timestamped"
break 2 break 2
else
echo "⚠️ gzip validation failed for $(basename "$backup_file")"
fi fi
fi fi
done done
@@ -443,7 +449,7 @@ if [ -n "$backup_path" ]; then
echo "⚠️ Backup restoration failed, will proceed with fresh database setup" echo "⚠️ Backup restoration failed, will proceed with fresh database setup"
fi fi
else else
echo " No valid backups found - proceeding with fresh setup" echo " No valid SQL backups found - proceeding with fresh setup"
echo "$(date): No backup found - fresh setup needed" > "$RESTORE_FAILED_MARKER" echo "$(date): No backup found - fresh setup needed" > "$RESTORE_FAILED_MARKER"
fi fi

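The hunks above replace content sniffing (`zcat | head | grep`) with `gzip -t`, which verifies the archive's structure and CRC rather than only checking that the first decompressed lines look like SQL. A minimal sketch of that check as a reusable helper is shown below; the function name, timeout value, and backup path are illustrative, not part of the repository.

```bash
# Sketch only: mirrors the validation pattern used in the hunks above.
is_intact_sql_gz() {
    local file="$1"
    [ -f "$file" ] && [ -s "$file" ] || return 1   # must exist and be non-empty
    timeout 10 gzip -t "$file" >/dev/null 2>&1      # integrity check, no extraction
}

# Example: pick the first intact dump in a (hypothetical) backup directory
for f in /backups/daily/latest/*.sql.gz; do
    if is_intact_sql_gz "$f"; then
        echo "✅ Valid backup file: $(basename "$f")"
        break
    fi
done
```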
116
scripts/bash/generate-2fa-qr.py Executable file
View File

@@ -0,0 +1,116 @@
#!/usr/bin/env python3
"""
AzerothCore 2FA QR Code Generator (Python version)
Generates TOTP secrets and QR codes for AzerothCore accounts
"""
import argparse
import base64
import os
import sys
import re
def validate_base32(secret):
"""Validate Base32 secret format"""
if not re.match(r'^[A-Z2-7]+$', secret):
print("Error: Invalid Base32 secret. Only A-Z and 2-7 characters allowed.", file=sys.stderr)
return False
if len(secret) != 16:
print(f"Error: AzerothCore SOAP requires a 16-character Base32 secret (got {len(secret)}).", file=sys.stderr)
return False
return True
def generate_secret():
"""Generate a random 16-character Base32 secret (AzerothCore SOAP requirement)"""
secret_bytes = os.urandom(10)
secret_b32 = base64.b32encode(secret_bytes).decode('ascii').rstrip('=')
return secret_b32[:16]
def generate_qr_code(uri, output_path):
"""Generate QR code using available library"""
try:
import qrcode
qr = qrcode.QRCode(
version=1,
error_correction=qrcode.constants.ERROR_CORRECT_L,
box_size=6,
border=4,
)
qr.add_data(uri)
qr.make(fit=True)
img = qr.make_image(fill_color="black", back_color="white")
img.save(output_path)
return True
except ImportError:
print("Error: qrcode library not installed.", file=sys.stderr)
print("Install it with: pip3 install qrcode[pil]", file=sys.stderr)
return False
def main():
parser = argparse.ArgumentParser(
description="Generate TOTP secrets and QR codes for AzerothCore 2FA",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog="""
Examples:
%(prog)s -u john_doe
%(prog)s -u john_doe -o /tmp/qr.png
%(prog)s -u john_doe -s JBSWY3DPEHPK3PXP -i MyServer
"""
)
parser.add_argument('-u', '--username', required=True,
help='Target username for 2FA setup')
parser.add_argument('-o', '--output',
help='Path to save QR code image (default: ./USERNAME_2fa_qr.png)')
parser.add_argument('-s', '--secret',
help='Use existing 16-character Base32 secret (generates random if not provided)')
parser.add_argument('-i', '--issuer', default='AzerothCore',
help='Issuer name for the TOTP entry (default: AzerothCore)')
args = parser.parse_args()
# Set default output path
if not args.output:
args.output = f"./{args.username}_2fa_qr.png"
# Generate or validate secret
if args.secret:
print("Using provided secret...")
if not validate_base32(args.secret):
sys.exit(1)
secret = args.secret
else:
print("Generating new TOTP secret...")
secret = generate_secret()
print(f"Generated secret: {secret}")
# Create TOTP URI
uri = f"otpauth://totp/{args.issuer}:{args.username}?secret={secret}&issuer={args.issuer}"
# Generate QR code
print("Generating QR code...")
if generate_qr_code(uri, args.output):
print(f"✓ QR code generated successfully: {args.output}")
else:
print("\nManual setup information:")
print(f"Secret: {secret}")
print(f"URI: {uri}")
sys.exit(1)
# Display setup information
print("\n=== AzerothCore 2FA Setup Information ===")
print(f"Username: {args.username}")
print(f"Secret: {secret}")
print(f"QR Code: {args.output}")
print(f"Issuer: {args.issuer}")
print("\nNext steps:")
print("1. Share the QR code image with the user")
print("2. User scans QR code with authenticator app")
print("3. Run on AzerothCore console:")
print(f" account set 2fa {args.username} {secret}")
print("4. User can now use 6-digit codes for login")
print("\nSecurity Note: Keep the secret secure and delete the QR code after setup.")
if __name__ == "__main__":
main()

166
scripts/bash/generate-2fa-qr.sh Executable file
View File

@@ -0,0 +1,166 @@
#!/bin/bash
# AzerothCore 2FA QR Code Generator
# Generates TOTP secrets and QR codes for AzerothCore accounts
set -e
# Color codes for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Function to display usage
show_usage() {
echo "Usage: $0 -u USERNAME [-o OUTPUT_PATH] [-s SECRET] [-i ISSUER]"
echo ""
echo "Options:"
echo " -u USERNAME Target username for 2FA setup (required)"
echo " -o OUTPUT_PATH Path to save QR code image (default: ./USERNAME_2fa_qr.png)"
echo " -s SECRET Use existing 16-character Base32 secret (generates random if not provided)"
echo " -i ISSUER Issuer name for the TOTP entry (default: AzerothCore)"
echo " -h Show this help message"
echo ""
echo "Examples:"
echo " $0 -u john_doe"
echo " $0 -u john_doe -o /tmp/qr.png"
echo " $0 -u john_doe -s JBSWY3DPEHPK3PXP -i MyServer"
}
# Function to validate Base32
validate_base32() {
local secret="$1"
if [[ ! "$secret" =~ ^[A-Z2-7]+$ ]]; then
echo -e "${RED}Error: Invalid Base32 secret. Only A-Z and 2-7 characters allowed.${NC}" >&2
return 1
fi
if [ ${#secret} -ne 16 ]; then
echo -e "${RED}Error: AzerothCore SOAP requires a 16-character Base32 secret (got ${#secret}).${NC}" >&2
return 1
fi
}
# Function to generate Base32 secret
generate_secret() {
# Generate 10 random bytes and encode as 16-character Base32 (AzerothCore SOAP requirement)
if command -v base32 >/dev/null 2>&1; then
openssl rand 10 | base32 -w0 | head -c16
else
# Fallback using Python if base32 command not available
python3 -c "
import base64
import os
secret_bytes = os.urandom(10)
secret_b32 = base64.b32encode(secret_bytes).decode('ascii').rstrip('=')
print(secret_b32[:16])
"
fi
}
# Default values
USERNAME=""
OUTPUT_PATH=""
SECRET=""
ISSUER="AzerothCore"
# Parse command line arguments
while getopts "u:o:s:i:h" opt; do
case ${opt} in
u )
USERNAME="$OPTARG"
;;
o )
OUTPUT_PATH="$OPTARG"
;;
s )
SECRET="$OPTARG"
;;
i )
ISSUER="$OPTARG"
;;
h )
show_usage
exit 0
;;
\? )
echo -e "${RED}Invalid option: $OPTARG${NC}" 1>&2
show_usage
exit 1
;;
: )
echo -e "${RED}Invalid option: $OPTARG requires an argument${NC}" 1>&2
show_usage
exit 1
;;
esac
done
# Validate required parameters
if [ -z "$USERNAME" ]; then
echo -e "${RED}Error: Username is required.${NC}" >&2
show_usage
exit 1
fi
# Set default output path if not provided
if [ -z "$OUTPUT_PATH" ]; then
OUTPUT_PATH="./${USERNAME}_2fa_qr.png"
fi
# Generate secret if not provided
if [ -z "$SECRET" ]; then
echo -e "${BLUE}Generating new TOTP secret...${NC}"
SECRET=$(generate_secret)
if [ -z "$SECRET" ]; then
echo -e "${RED}Error: Failed to generate secret.${NC}" >&2
exit 1
fi
echo -e "${GREEN}Generated secret: $SECRET${NC}"
else
echo -e "${BLUE}Using provided secret...${NC}"
if ! validate_base32 "$SECRET"; then
exit 1
fi
fi
# Create TOTP URI
URI="otpauth://totp/${ISSUER}:${USERNAME}?secret=${SECRET}&issuer=${ISSUER}"
# Check if qrencode is available
if ! command -v qrencode >/dev/null 2>&1; then
echo -e "${RED}Error: qrencode is not installed.${NC}" >&2
echo "Install it with: sudo apt-get install qrencode (Ubuntu/Debian) or brew install qrencode (macOS)"
echo ""
echo -e "${BLUE}Manual setup information:${NC}"
echo "Secret: $SECRET"
echo "URI: $URI"
exit 1
fi
# Generate QR code
echo -e "${BLUE}Generating QR code...${NC}"
if echo "$URI" | qrencode -s 6 -o "$OUTPUT_PATH"; then
echo -e "${GREEN}✓ QR code generated successfully: $OUTPUT_PATH${NC}"
else
echo -e "${RED}Error: Failed to generate QR code.${NC}" >&2
exit 1
fi
# Display setup information
echo ""
echo -e "${YELLOW}=== AzerothCore 2FA Setup Information ===${NC}"
echo "Username: $USERNAME"
echo "Secret: $SECRET"
echo "QR Code: $OUTPUT_PATH"
echo "Issuer: $ISSUER"
echo ""
echo -e "${BLUE}Next steps:${NC}"
echo "1. Share the QR code image with the user"
echo "2. User scans QR code with authenticator app"
echo "3. Run on AzerothCore console:"
echo -e " ${GREEN}account set 2fa $USERNAME $SECRET${NC}"
echo "4. User can now use 6-digit codes for login"
echo ""
echo -e "${YELLOW}Security Note: Keep the secret secure and delete the QR code after setup.${NC}"
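
Both generators encode a standard `otpauth://` provisioning URI into the QR image. As a concrete illustration using the sample values from the usage text above (the secret is the documentation example, not a real credential), the encoded URI would be:

```bash
# Hypothetical run using the sample secret from the usage examples above
./scripts/bash/generate-2fa-qr.sh -u john_doe -s JBSWY3DPEHPK3PXP
# QR image encodes:
#   otpauth://totp/AzerothCore:john_doe?secret=JBSWY3DPEHPK3PXP&issuer=AzerothCore
```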

283
scripts/bash/import-pdumps.sh Executable file
View File

@@ -0,0 +1,283 @@
#!/bin/bash
# Process and import character pdump files from import/pdumps/ directory
set -euo pipefail
INVOCATION_DIR="$PWD"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR/../.." # Go to project root
COLOR_RED='\033[0;31m'
COLOR_GREEN='\033[0;32m'
COLOR_YELLOW='\033[1;33m'
COLOR_BLUE='\033[0;34m'
COLOR_RESET='\033[0m'
log(){ printf '%b\n' "${COLOR_GREEN}$*${COLOR_RESET}"; }
warn(){ printf '%b\n' "${COLOR_YELLOW}$*${COLOR_RESET}"; }
err(){ printf '%b\n' "${COLOR_RED}$*${COLOR_RESET}"; }
info(){ printf '%b\n' "${COLOR_BLUE}$*${COLOR_RESET}"; }
fatal(){ err "$*"; exit 1; }
# Source environment variables
if [ -f ".env" ]; then
set -a
source .env
set +a
fi
IMPORT_DIR="./import/pdumps"
MYSQL_PW="${MYSQL_ROOT_PASSWORD:-}"
AUTH_DB="${ACORE_DB_AUTH_NAME:-acore_auth}"
CHARACTERS_DB="${ACORE_DB_CHARACTERS_NAME:-acore_characters}"
DEFAULT_ACCOUNT="${DEFAULT_IMPORT_ACCOUNT:-}"
INTERACTIVE=${INTERACTIVE:-true}
usage(){
cat <<'EOF'
Usage: ./import-pdumps.sh [options]
Automatically process and import all character pdump files from import/pdumps/ directory.
Options:
--password PASS MySQL root password (overrides env)
--account ACCOUNT Default account for imports (overrides env)
--auth-db NAME Auth database name (overrides env)
--characters-db NAME Characters database name (overrides env)
--non-interactive Don't prompt for missing information
-h, --help Show this help and exit
Directory Structure:
import/pdumps/
├── character1.pdump # Will be imported with default settings
├── character2.sql # SQL dump files also supported
└── configs/ # Optional: per-file configuration
├── character1.conf # account=testuser, name=NewName
└── character2.conf # account=12345, guid=5000
Configuration File Format (.conf):
account=target_account_name_or_id
name=new_character_name # Optional: rename character
guid=force_specific_guid # Optional: force GUID
Environment Variables:
MYSQL_ROOT_PASSWORD # MySQL root password
DEFAULT_IMPORT_ACCOUNT # Default account for imports
ACORE_DB_AUTH_NAME # Auth database name
ACORE_DB_CHARACTERS_NAME # Characters database name
Examples:
# Import all pdumps with environment settings
./import-pdumps.sh
# Import with specific password and account
./import-pdumps.sh --password mypass --account testuser
EOF
}
check_dependencies(){
if ! docker ps >/dev/null 2>&1; then
fatal "Docker is not running or accessible"
fi
if ! docker exec ac-mysql mysql --version >/dev/null 2>&1; then
fatal "MySQL container (ac-mysql) is not running or accessible"
fi
}
parse_config_file(){
local config_file="$1"
local -A config=()
if [[ -f "$config_file" ]]; then
while IFS='=' read -r key value; do
# Skip comments and empty lines
[[ "$key" =~ ^[[:space:]]*# ]] && continue
[[ -z "$key" ]] && continue
# Remove leading/trailing whitespace
key=$(echo "$key" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
value=$(echo "$value" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
config["$key"]="$value"
done < "$config_file"
fi
# Export as variables for the calling function
export CONFIG_ACCOUNT="${config[account]:-}"
export CONFIG_NAME="${config[name]:-}"
export CONFIG_GUID="${config[guid]:-}"
}
prompt_for_account(){
local filename="$1"
if [[ "$INTERACTIVE" != "true" ]]; then
fatal "No account specified for $filename and running in non-interactive mode"
fi
# Prompt text goes to stderr so the command substitution in the caller captures only the account value
echo "" >&2
warn "No account specified for: $filename" >&2
echo "Available options:" >&2
echo " 1. Provide account name or ID" >&2
echo " 2. Skip this file" >&2
echo "" >&2
while true; do
read -p "Enter account name/ID (or 'skip'): " account_input
case "$account_input" in
skip|Skip|SKIP)
return 1
;;
"")
warn "Please enter an account name/ID or 'skip'" >&2
continue
;;
*)
echo "$account_input"
return 0
;;
esac
done
}
process_pdump_file(){
local pdump_file="$1"
local filename
filename=$(basename "$pdump_file")
local config_file="$IMPORT_DIR/configs/${filename%.*}.conf"
info "Processing: $filename"
# Parse configuration file if it exists
parse_config_file "$config_file"
# Determine account
local target_account="${CONFIG_ACCOUNT:-$DEFAULT_ACCOUNT}"
if [[ -z "$target_account" ]]; then
if ! target_account=$(prompt_for_account "$filename"); then
warn "Skipping $filename (no account provided)"
return 0
fi
fi
# Build command arguments
local cmd_args=(
--file "$pdump_file"
--account "$target_account"
--password "$MYSQL_PW"
--auth-db "$AUTH_DB"
--characters-db "$CHARACTERS_DB"
)
# Add optional parameters if specified in config
[[ -n "$CONFIG_NAME" ]] && cmd_args+=(--name "$CONFIG_NAME")
[[ -n "$CONFIG_GUID" ]] && cmd_args+=(--guid "$CONFIG_GUID")
log "Importing $filename to account $target_account"
[[ -n "$CONFIG_NAME" ]] && log " Character name: $CONFIG_NAME"
[[ -n "$CONFIG_GUID" ]] && log " Forced GUID: $CONFIG_GUID"
# Execute the import
if "./scripts/bash/pdump-import.sh" "${cmd_args[@]}"; then
log "✅ Successfully imported: $filename"
# Move processed file to processed/ subdirectory
local processed_dir="$IMPORT_DIR/processed"
mkdir -p "$processed_dir"
mv "$pdump_file" "$processed_dir/"
# Guard the optional config move so a missing .conf does not flip the function's exit status
if [[ -f "$config_file" ]]; then mv "$config_file" "$processed_dir/"; fi
else
err "❌ Failed to import: $filename"
return 1
fi
}
# Parse command line arguments
while [[ $# -gt 0 ]]; do
case "$1" in
--password)
[[ $# -ge 2 ]] || fatal "--password requires a value"
MYSQL_PW="$2"
shift 2
;;
--account)
[[ $# -ge 2 ]] || fatal "--account requires a value"
DEFAULT_ACCOUNT="$2"
shift 2
;;
--auth-db)
[[ $# -ge 2 ]] || fatal "--auth-db requires a value"
AUTH_DB="$2"
shift 2
;;
--characters-db)
[[ $# -ge 2 ]] || fatal "--characters-db requires a value"
CHARACTERS_DB="$2"
shift 2
;;
--non-interactive)
INTERACTIVE=false
shift
;;
-h|--help)
usage
exit 0
;;
*)
fatal "Unknown option: $1"
;;
esac
done
# Validate required parameters
[[ -n "$MYSQL_PW" ]] || fatal "MySQL password required (use --password or set MYSQL_ROOT_PASSWORD)"
# Check dependencies
check_dependencies
# Check if import directory exists and has files
if [[ ! -d "$IMPORT_DIR" ]]; then
info "Import directory doesn't exist: $IMPORT_DIR"
info "Create the directory and place your .pdump or .sql files there."
exit 0
fi
# Find pdump files
shopt -s nullglob
pdump_files=("$IMPORT_DIR"/*.pdump "$IMPORT_DIR"/*.sql)
shopt -u nullglob
if [[ ${#pdump_files[@]} -eq 0 ]]; then
info "No pdump files found in $IMPORT_DIR"
info "Place your .pdump or .sql files in this directory to import them."
exit 0
fi
log "Found ${#pdump_files[@]} pdump file(s) to process"
# Create configs directory if it doesn't exist
mkdir -p "$IMPORT_DIR/configs"
# Process each file
processed=0
failed=0
for pdump_file in "${pdump_files[@]}"; do
if process_pdump_file "$pdump_file"; then
processed=$((processed + 1))
else
failed=$((failed + 1))
fi
done
echo ""
log "Import summary:"
log " ✅ Processed: $processed"
[[ $failed -gt 0 ]] && err " ❌ Failed: $failed"
if [[ $processed -gt 0 ]]; then
log ""
log "Character imports completed! Processed files moved to $IMPORT_DIR/processed/"
log "You can now log in and access your imported characters."
fi

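As an illustration of the `configs/` convention described in the help text above, a per-file configuration could look like this (account and character names are placeholders; keep one `key=value` per line, since `parse_config_file()` does not strip inline comments):

```bash
# import/pdumps/configs/character1.conf -- hypothetical example
account=testuser
name=Rebornhero
# guid=5000   (optional: uncomment to force a specific GUID)
```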
View File

@@ -141,6 +141,10 @@ run_post_install_hooks(){
  export MODULES_ROOT="${MODULES_ROOT:-/modules}"
  export LUA_SCRIPTS_TARGET="/azerothcore/lua_scripts"
+  # Pass build environment variables to hooks
+  export STACK_SOURCE_VARIANT="${STACK_SOURCE_VARIANT:-}"
+  export MODULES_REBUILD_SOURCE_PATH="${MODULES_REBUILD_SOURCE_PATH:-}"
  # Execute the hook script
  if "$hook_script"; then
    ok "Hook '$hook' completed successfully"
@@ -174,7 +178,18 @@ install_enabled_modules(){
      continue
    fi
    if [ -d "$dir/.git" ]; then
-      info "$dir already present; skipping clone"
+      info "$dir already present; checking for updates"
+      (cd "$dir" && git fetch origin >/dev/null 2>&1 || warn "Failed to fetch updates for $dir")
+      local current_branch
+      current_branch=$(cd "$dir" && git rev-parse --abbrev-ref HEAD 2>/dev/null || echo "master")
+      if (cd "$dir" && git pull origin "$current_branch" 2>&1 | grep -q "Already up to date"); then
+        info "$dir is already up to date"
+      else
+        ok "$dir updated from remote"
+      fi
+      if [ -n "$ref" ]; then
+        (cd "$dir" && git checkout "$ref") || warn "Unable to checkout ref $ref for $dir"
+      fi
    elif [ -d "$dir" ]; then
      warn "$dir exists but is not a git repository; leaving in place"
    else
@@ -467,6 +482,7 @@ load_sql_helper(){
# Module SQL is now staged at runtime by stage-modules.sh which copies files to
# /azerothcore/data/sql/updates/ (core directory) where they ARE scanned and processed.
track_module_state(){
  echo 'Checking for module changes that require rebuild...'

View File

@@ -11,66 +11,66 @@ if ! command -v "$ORIGINAL_ENTRYPOINT" >/dev/null 2>&1; then
fi
TARGET_SPEC="${MYSQL_RUNTIME_USER:-${CONTAINER_USER:-}}"
-if [ -z "${TARGET_SPEC:-}" ] || [ "${TARGET_SPEC}" = "0:0" ]; then
-  exec "$ORIGINAL_ENTRYPOINT" "$@"
-fi
-if [[ "$TARGET_SPEC" != *:* ]]; then
-  echo "mysql-entrypoint: Expected MYSQL_RUNTIME_USER/CONTAINER_USER in uid:gid form, got '${TARGET_SPEC}'" >&2
-  exit 1
-fi
-IFS=':' read -r TARGET_UID TARGET_GID <<< "$TARGET_SPEC"
-if ! [[ "$TARGET_UID" =~ ^[0-9]+$ ]] || ! [[ "$TARGET_GID" =~ ^[0-9]+$ ]]; then
-  echo "mysql-entrypoint: UID/GID must be numeric (received uid='${TARGET_UID}' gid='${TARGET_GID}')" >&2
-  exit 1
-fi
-if ! id mysql >/dev/null 2>&1; then
-  echo "mysql-entrypoint: mysql user not found in container" >&2
-  exit 1
-fi
-current_uid="$(id -u mysql)"
-current_gid="$(id -g mysql)"
-# Adjust group if needed
target_group_name=""
-if [ "$current_gid" != "$TARGET_GID" ]; then
-  if groupmod -g "$TARGET_GID" mysql 2>/dev/null; then
-    target_group_name="mysql"
-  else
-    existing_group="$(getent group "$TARGET_GID" | cut -d: -f1 || true)"
-    if [ -z "$existing_group" ]; then
-      existing_group="mysql-host"
-      if ! getent group "$existing_group" >/dev/null 2>&1; then
-        groupadd -g "$TARGET_GID" "$existing_group"
-      fi
-    fi
-    usermod -g "$existing_group" mysql
-    target_group_name="$existing_group"
-  fi
-else
-  target_group_name="$(getent group mysql | cut -d: -f1)"
-fi
-if [ -z "$target_group_name" ]; then
-  target_group_name="$(getent group "$TARGET_GID" | cut -d: -f1 || true)"
-fi
-# Adjust user UID if needed
-if [ "$current_uid" != "$TARGET_UID" ]; then
-  if getent passwd "$TARGET_UID" >/dev/null 2>&1 && [ "$(getent passwd "$TARGET_UID" | cut -d: -f1)" != "mysql" ]; then
-    echo "mysql-entrypoint: UID ${TARGET_UID} already in use by $(getent passwd "$TARGET_UID" | cut -d: -f1)." >&2
-    echo "mysql-entrypoint: Please choose a different CONTAINER_USER or adjust the image." >&2
-    exit 1
-  fi
-  usermod -u "$TARGET_UID" mysql
-fi
-# Ensure group lookup after potential changes
-target_group_name="$(getent group "$TARGET_GID" | cut -d: -f1 || echo "$target_group_name")"
+if [ -n "${TARGET_SPEC:-}" ] && [ "${TARGET_SPEC}" != "0:0" ]; then
+  if [[ "$TARGET_SPEC" != *:* ]]; then
+    echo "mysql-entrypoint: Expected MYSQL_RUNTIME_USER/CONTAINER_USER in uid:gid form, got '${TARGET_SPEC}'" >&2
+    exit 1
+  fi
+  IFS=':' read -r TARGET_UID TARGET_GID <<< "$TARGET_SPEC"
+  if ! [[ "$TARGET_UID" =~ ^[0-9]+$ ]] || ! [[ "$TARGET_GID" =~ ^[0-9]+$ ]]; then
+    echo "mysql-entrypoint: UID/GID must be numeric (received uid='${TARGET_UID}' gid='${TARGET_GID}')" >&2
+    exit 1
+  fi
+  if ! id mysql >/dev/null 2>&1; then
+    echo "mysql-entrypoint: mysql user not found in container" >&2
+    exit 1
+  fi
+  current_uid="$(id -u mysql)"
+  current_gid="$(id -g mysql)"
+  # Adjust group if needed
+  if [ "$current_gid" != "$TARGET_GID" ]; then
+    if groupmod -g "$TARGET_GID" mysql 2>/dev/null; then
+      target_group_name="mysql"
+    else
+      existing_group="$(getent group "$TARGET_GID" | cut -d: -f1 || true)"
+      if [ -z "$existing_group" ]; then
+        existing_group="mysql-host"
+        if ! getent group "$existing_group" >/dev/null 2>&1; then
+          groupadd -g "$TARGET_GID" "$existing_group"
+        fi
+      fi
+      usermod -g "$existing_group" mysql
+      target_group_name="$existing_group"
+    fi
+  else
+    target_group_name="$(getent group mysql | cut -d: -f1)"
+  fi
+  if [ -z "$target_group_name" ]; then
+    target_group_name="$(getent group "$TARGET_GID" | cut -d: -f1 || true)"
+  fi
+  # Adjust user UID if needed
+  if [ "$current_uid" != "$TARGET_UID" ]; then
+    if getent passwd "$TARGET_UID" >/dev/null 2>&1 && [ "$(getent passwd "$TARGET_UID" | cut -d: -f1)" != "mysql" ]; then
+      echo "mysql-entrypoint: UID ${TARGET_UID} already in use by $(getent passwd "$TARGET_UID" | cut -d: -f1)." >&2
+      echo "mysql-entrypoint: Please choose a different CONTAINER_USER or adjust the image." >&2
+      exit 1
+    fi
+    usermod -u "$TARGET_UID" mysql
+  fi
+  # Ensure group lookup after potential changes
+  target_group_name="$(getent group "$TARGET_GID" | cut -d: -f1 || echo "$target_group_name")"
+else
+  target_group_name="$(getent group mysql | cut -d: -f1 || echo mysql)"
+fi
# Update ownership on relevant directories if they exist
for path in /var/lib/mysql-runtime /var/lib/mysql /var/lib/mysql-persistent /backups; do
@@ -79,6 +79,91 @@ for path in /var/lib/mysql-runtime /var/lib/mysql /var/lib/mysql-persistent /bac
  fi
done
# Minimal fix: Restore data from persistent storage on startup and sync on shutdown only
RUNTIME_DIR="/var/lib/mysql-runtime"
PERSISTENT_DIR="/var/lib/mysql-persistent"
sync_datadir() {
if [ ! -d "$RUNTIME_DIR" ]; then
echo "⚠️ Runtime directory not found: $RUNTIME_DIR"
return 1
fi
if [ ! -d "$PERSISTENT_DIR" ]; then
echo "⚠️ Persistent directory not found: $PERSISTENT_DIR"
return 1
fi
user_schema_count="$(find "$RUNTIME_DIR" -mindepth 1 -maxdepth 1 -type d \
! -name mysql \
! -name performance_schema \
! -name information_schema \
! -name sys \
! -name "#innodb_temp" \
! -name "#innodb_redo" 2>/dev/null | wc -l | tr -d ' ')"
if [ "${user_schema_count:-0}" -eq 0 ]; then
echo "⚠️ Runtime data appears empty (system schemas only); skipping sync"
return 0
fi
echo "📦 Syncing MySQL data to persistent storage..."
if command -v rsync >/dev/null 2>&1; then
rsync -a --delete \
--exclude='.restore-completed' \
--exclude='.restore-failed' \
--exclude='.import-completed' \
--exclude='backup.sql' \
"$RUNTIME_DIR"/ "$PERSISTENT_DIR"/
else
# Mirror the runtime state while preserving marker files.
find "$PERSISTENT_DIR" -mindepth 1 -maxdepth 1 \
! -name ".restore-completed" \
! -name ".restore-failed" \
! -name ".import-completed" \
! -name "backup.sql" \
-exec rm -rf {} + 2>/dev/null || true
cp -a "$RUNTIME_DIR"/. "$PERSISTENT_DIR"/
fi
chown -R mysql:"$target_group_name" "$PERSISTENT_DIR"
echo "✅ Sync completed"
}
handle_shutdown() {
echo "🔻 Shutdown signal received"
if command -v mysqladmin >/dev/null 2>&1; then
if mysqladmin -h localhost -u root -p"${MYSQL_ROOT_PASSWORD:-}" shutdown 2>/dev/null; then
echo "✅ MySQL shutdown complete"
sync_datadir || true
else
echo "⚠️ mysqladmin shutdown failed; skipping sync to avoid corruption"
fi
else
echo "⚠️ mysqladmin not found; skipping sync"
fi
if [ -n "${child_pid:-}" ] && kill -0 "$child_pid" 2>/dev/null; then
wait "$child_pid" || true
fi
exit 0
}
# Simple startup restoration
if [ -d "$PERSISTENT_DIR" ]; then
# Check for MySQL data files (exclude marker files starting with .)
if find "$PERSISTENT_DIR" -maxdepth 1 -name "*" ! -name ".*" ! -path "$PERSISTENT_DIR" | grep -q .; then
if [ -d "$RUNTIME_DIR" ] && [ -z "$(ls -A "$RUNTIME_DIR" 2>/dev/null)" ]; then
echo "🔄 Restoring MySQL data from persistent storage..."
cp -a "$PERSISTENT_DIR"/* "$RUNTIME_DIR/" 2>/dev/null || true
chown -R mysql:"$target_group_name" "$RUNTIME_DIR"
echo "✅ Data restored from persistent storage"
fi
fi
fi
# Simple approach: restore on startup only
# Data loss window exists but prevents complete loss on restart
trap handle_shutdown TERM INT
disable_binlog="${MYSQL_DISABLE_BINLOG:-}"
if [ "${disable_binlog}" = "1" ]; then
  add_skip_flag=1
@@ -93,4 +178,6 @@ if [ "${disable_binlog}" = "1" ]; then
  fi
fi
-exec "$ORIGINAL_ENTRYPOINT" "$@"
+"$ORIGINAL_ENTRYPOINT" "$@" &
+child_pid=$!
+wait "$child_pid"

344
scripts/bash/pdump-import.sh Executable file
View File

@@ -0,0 +1,344 @@
#!/bin/bash
# Import character pdump files into AzerothCore database
set -euo pipefail
INVOCATION_DIR="$PWD"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"
COLOR_RED='\033[0;31m'
COLOR_GREEN='\033[0;32m'
COLOR_YELLOW='\033[1;33m'
COLOR_BLUE='\033[0;34m'
COLOR_RESET='\033[0m'
log(){ printf '%b\n' "${COLOR_GREEN}$*${COLOR_RESET}"; }
warn(){ printf '%b\n' "${COLOR_YELLOW}$*${COLOR_RESET}"; }
err(){ printf '%b\n' "${COLOR_RED}$*${COLOR_RESET}"; }
info(){ printf '%b\n' "${COLOR_BLUE}$*${COLOR_RESET}"; }
fatal(){ err "$*"; exit 1; }
MYSQL_PW=""
PDUMP_FILE=""
TARGET_ACCOUNT=""
NEW_CHARACTER_NAME=""
FORCE_GUID=""
AUTH_DB="acore_auth"
CHARACTERS_DB="acore_characters"
DRY_RUN=false
BACKUP_BEFORE=true
usage(){
cat <<'EOF'
Usage: ./pdump-import.sh [options]
Import character pdump files into AzerothCore database.
Required Options:
-f, --file FILE Pdump file to import (.pdump or .sql format)
-a, --account ACCOUNT Target account name or ID for character import
-p, --password PASS MySQL root password
Optional:
-n, --name NAME New character name (if different from dump)
-g, --guid GUID Force specific character GUID
--auth-db NAME Auth database schema name (default: acore_auth)
--characters-db NAME Characters database schema name (default: acore_characters)
--dry-run Validate pdump without importing
--no-backup Skip pre-import backup (not recommended)
-h, --help Show this help and exit
Examples:
# Import character from pdump file
./pdump-import.sh --file character.pdump --account testaccount --password azerothcore123
# Import with new character name
./pdump-import.sh --file oldchar.pdump --account newaccount --name "NewCharName" --password azerothcore123
# Validate pdump file without importing
./pdump-import.sh --file character.pdump --account testaccount --password azerothcore123 --dry-run
Notes:
- Account must exist in the auth database before import
- Character names must be unique across the server
- Pre-import backup is created automatically (can be disabled with --no-backup)
- Use --dry-run to validate pdump structure before actual import
EOF
}
validate_account(){
local account="$1"
if [[ "$account" =~ ^[0-9]+$ ]]; then
# Account ID provided
local count
count=$(docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -N -B -e \
"SELECT COUNT(*) FROM ${AUTH_DB}.account WHERE id = $account;")
[[ "$count" -eq 1 ]] || fatal "Account ID $account not found in auth database"
else
# Account name provided
local count
count=$(docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -N -B -e \
"SELECT COUNT(*) FROM ${AUTH_DB}.account WHERE username = '$account';")
[[ "$count" -eq 1 ]] || fatal "Account '$account' not found in auth database"
fi
}
get_account_id(){
local account="$1"
if [[ "$account" =~ ^[0-9]+$ ]]; then
echo "$account"
else
docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -N -B -e \
"SELECT id FROM ${AUTH_DB}.account WHERE username = '$account';"
fi
}
validate_character_name(){
local name="$1"
# Check character name format (WoW naming rules)
if [[ ! "$name" =~ ^[A-Za-z]{2,12}$ ]]; then
fatal "Invalid character name: '$name'. Must be 2-12 letters, no numbers or special characters."
fi
# Check if character name already exists
local count
count=$(docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -N -B -e \
"SELECT COUNT(*) FROM ${CHARACTERS_DB}.characters WHERE name = '$name';")
[[ "$count" -eq 0 ]] || fatal "Character name '$name' already exists in database"
}
get_next_guid(){
docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -N -B -e \
"SELECT COALESCE(MAX(guid), 0) + 1 FROM ${CHARACTERS_DB}.characters;"
}
validate_pdump_format(){
local file="$1"
if [[ ! -f "$file" ]]; then
fatal "Pdump file not found: $file"
fi
# Check if file is readable and has SQL-like content
if ! head -10 "$file" | grep -q -i "INSERT\|UPDATE\|CREATE\|ALTER"; then
warn "File does not appear to contain SQL statements. Continuing anyway..."
fi
info "Pdump file validation: OK"
}
backup_characters(){
local timestamp
timestamp=$(date +%Y%m%d_%H%M%S)
local backup_file="manual-backups/characters-pre-pdump-import-${timestamp}.sql"
mkdir -p manual-backups
log "Creating backup: $backup_file"
docker exec ac-mysql mysqldump -uroot -p"$MYSQL_PW" "$CHARACTERS_DB" > "$backup_file"
echo "$backup_file"
}
process_pdump_sql(){
local file="$1"
local account_id="$2"
local new_guid="${3:-}"
local new_name="${4:-}"
# Create temporary processed file
local temp_file
temp_file=$(mktemp)
# Process the pdump SQL file
# Replace account references and optionally GUID/name
if [[ -n "$new_guid" && -n "$new_name" ]]; then
sed -e "s/\([^0-9]\)[0-9]\+\([^0-9].*account.*=\)/\1${account_id}\2/g" \
-e "s/\([^0-9]\)[0-9]\+\([^0-9].*guid.*=\)/\1${new_guid}\2/g" \
-e "s/'[^']*'\([^']*name.*=\)/'${new_name}'\1/g" \
"$file" > "$temp_file"
elif [[ -n "$new_guid" ]]; then
sed -e "s/\([^0-9]\)[0-9]\+\([^0-9].*account.*=\)/\1${account_id}\2/g" \
-e "s/\([^0-9]\)[0-9]\+\([^0-9].*guid.*=\)/\1${new_guid}\2/g" \
"$file" > "$temp_file"
elif [[ -n "$new_name" ]]; then
sed -e "s/\([^0-9]\)[0-9]\+\([^0-9].*account.*=\)/\1${account_id}\2/g" \
-e "s/'[^']*'\([^']*name.*=\)/'${new_name}'\1/g" \
"$file" > "$temp_file"
else
sed -e "s/\([^0-9]\)[0-9]\+\([^0-9].*account.*=\)/\1${account_id}\2/g" \
"$file" > "$temp_file"
fi
echo "$temp_file"
}
import_pdump(){
local processed_file="$1"
log "Importing character data into $CHARACTERS_DB database"
if docker exec -i ac-mysql mysql -uroot -p"$MYSQL_PW" "$CHARACTERS_DB" < "$processed_file"; then
log "Character import completed successfully"
else
fatal "Character import failed. Check MySQL logs for details."
fi
}
case "${1:-}" in
-h|--help) usage; exit 0;;
esac
# Parse command line arguments
POSITIONAL=()
while [[ $# -gt 0 ]]; do
case "$1" in
-f|--file)
[[ $# -ge 2 ]] || fatal "--file requires a file path"
PDUMP_FILE="$2"
shift 2
;;
-a|--account)
[[ $# -ge 2 ]] || fatal "--account requires an account name or ID"
TARGET_ACCOUNT="$2"
shift 2
;;
-p|--password)
[[ $# -ge 2 ]] || fatal "--password requires a value"
MYSQL_PW="$2"
shift 2
;;
-n|--name)
[[ $# -ge 2 ]] || fatal "--name requires a character name"
NEW_CHARACTER_NAME="$2"
shift 2
;;
-g|--guid)
[[ $# -ge 2 ]] || fatal "--guid requires a GUID number"
FORCE_GUID="$2"
shift 2
;;
--auth-db)
[[ $# -ge 2 ]] || fatal "--auth-db requires a value"
AUTH_DB="$2"
shift 2
;;
--characters-db)
[[ $# -ge 2 ]] || fatal "--characters-db requires a value"
CHARACTERS_DB="$2"
shift 2
;;
--dry-run)
DRY_RUN=true
shift
;;
--no-backup)
BACKUP_BEFORE=false
shift
;;
-h|--help)
usage
exit 0
;;
--)
shift
while [[ $# -gt 0 ]]; do
POSITIONAL+=("$1")
shift
done
break
;;
-*)
fatal "Unknown option: $1"
;;
*)
POSITIONAL+=("$1")
shift
;;
esac
done
# Validate required arguments
[[ -n "$PDUMP_FILE" ]] || fatal "Pdump file is required. Use --file FILE"
[[ -n "$TARGET_ACCOUNT" ]] || fatal "Target account is required. Use --account ACCOUNT"
[[ -n "$MYSQL_PW" ]] || fatal "MySQL password is required. Use --password PASS"
# Resolve relative paths
if [[ ! "$PDUMP_FILE" =~ ^/ ]]; then
PDUMP_FILE="$INVOCATION_DIR/$PDUMP_FILE"
fi
# Validate inputs
log "Validating pdump file..."
validate_pdump_format "$PDUMP_FILE"
log "Validating target account..."
validate_account "$TARGET_ACCOUNT"
ACCOUNT_ID=$(get_account_id "$TARGET_ACCOUNT")
log "Target account ID: $ACCOUNT_ID"
if [[ -n "$NEW_CHARACTER_NAME" ]]; then
log "Validating new character name..."
validate_character_name "$NEW_CHARACTER_NAME"
fi
# Determine GUID
if [[ -n "$FORCE_GUID" ]]; then
CHARACTER_GUID="$FORCE_GUID"
log "Using forced GUID: $CHARACTER_GUID"
else
CHARACTER_GUID=$(get_next_guid)
log "Using next available GUID: $CHARACTER_GUID"
fi
# Process pdump file
log "Processing pdump file..."
PROCESSED_FILE=$(process_pdump_sql "$PDUMP_FILE" "$ACCOUNT_ID" "$CHARACTER_GUID" "$NEW_CHARACTER_NAME")
if $DRY_RUN; then
info "DRY RUN: Pdump processing completed successfully"
info "Processed file saved to: $PROCESSED_FILE"
info "Account ID: $ACCOUNT_ID"
info "Character GUID: $CHARACTER_GUID"
[[ -n "$NEW_CHARACTER_NAME" ]] && info "Character name: $NEW_CHARACTER_NAME"
info "Run without --dry-run to perform actual import"
rm -f "$PROCESSED_FILE"
exit 0
fi
# Create backup before import
BACKUP_FILE=""
if $BACKUP_BEFORE; then
BACKUP_FILE=$(backup_characters)
fi
# Stop world server to prevent issues during import
log "Stopping world server for safe import..."
docker stop ac-worldserver >/dev/null 2>&1 || warn "World server was not running"
# Perform import
trap 'rm -f "$PROCESSED_FILE"' EXIT
import_pdump "$PROCESSED_FILE"
# Restart world server
log "Restarting world server..."
docker start ac-worldserver >/dev/null 2>&1
# Wait for server to initialize
log "Waiting for world server to initialize..."
for i in {1..30}; do
if docker exec ac-worldserver pgrep worldserver >/dev/null 2>&1; then
log "World server is running"
break
fi
if [ $i -eq 30 ]; then
warn "World server took longer than expected to start"
fi
sleep 2
done
# Verify import
CHARACTER_COUNT=$(docker exec ac-mysql mysql -uroot -p"$MYSQL_PW" -N -B -e \
"SELECT COUNT(*) FROM ${CHARACTERS_DB}.characters WHERE account = $ACCOUNT_ID;")
log "Import completed successfully!"
log "Characters on account $TARGET_ACCOUNT: $CHARACTER_COUNT"
[[ -n "$BACKUP_FILE" ]] && log "Backup created: $BACKUP_FILE"
info "Character import from pdump completed. You can now log in and play!"

View File

@@ -4,6 +4,7 @@ import os
import re
import socket
import subprocess
+import sys
import time
from pathlib import Path
@@ -468,8 +469,14 @@ def docker_stats():
def main():
    env = load_env()
-    project = read_env(env, "COMPOSE_PROJECT_NAME", "acore-compose")
-    network = read_env(env, "NETWORK_NAME", "azerothcore")
+    project = read_env(env, "COMPOSE_PROJECT_NAME")
+    if not project:
+        print(json.dumps({"error": "COMPOSE_PROJECT_NAME not set in environment"}), file=sys.stderr)
+        sys.exit(1)
+    network = read_env(env, "NETWORK_NAME")
+    if not network:
+        print(json.dumps({"error": "NETWORK_NAME not set in environment"}), file=sys.stderr)
+        sys.exit(1)
    services = [
        ("ac-mysql", "MySQL"),

65
scripts/bash/test-2fa-token.py Executable file
View File

@@ -0,0 +1,65 @@
#!/usr/bin/env python3
"""
Test TOTP token generation for AzerothCore 2FA
"""
import base64
import hmac
import hashlib
import struct
import time
import argparse
def generate_totp(secret, timestamp=None, interval=30):
"""Generate TOTP token from Base32 secret"""
if timestamp is None:
timestamp = int(time.time())
# Calculate time counter
counter = timestamp // interval
# Decode Base32 secret
# Add padding if needed
secret = secret.upper()
missing_padding = len(secret) % 8
if missing_padding:
secret += '=' * (8 - missing_padding)
key = base64.b32decode(secret)
# Pack counter as big-endian 8-byte integer
counter_bytes = struct.pack('>Q', counter)
# Generate HMAC-SHA1 hash
hmac_hash = hmac.new(key, counter_bytes, hashlib.sha1).digest()
# Dynamic truncation
offset = hmac_hash[-1] & 0xf
code = struct.unpack('>I', hmac_hash[offset:offset + 4])[0]
code &= 0x7fffffff
code %= 1000000
return f"{code:06d}"
def main():
parser = argparse.ArgumentParser(description="Generate TOTP tokens for testing")
parser.add_argument('-s', '--secret', required=True, help='Base32 secret')
parser.add_argument('-t', '--time', type=int, help='Unix timestamp (default: current time)')
parser.add_argument('-c', '--count', type=int, default=1, help='Number of tokens to generate')
args = parser.parse_args()
timestamp = args.time or int(time.time())
print(f"Secret: {args.secret}")
print(f"Timestamp: {timestamp} ({time.ctime(timestamp)})")
print(f"Interval: 30 seconds")
print()
for i in range(args.count):
current_time = timestamp + (i * 30)
token = generate_totp(args.secret, current_time)
print(f"Time: {time.ctime(current_time)} | Token: {token}")
if __name__ == "__main__":
main()
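
A quick way to sanity-check this generator is the RFC 6238 Appendix B test vector: the ASCII seed `12345678901234567890` is `GEZDGNBVGY3TQOJQGEZDGNBVGY3TQOJQ` in Base32, and at Unix time 59 the expected 6-digit SHA-1 TOTP is 287082. That expectation comes from the RFC itself, not from this repository.

```bash
# Verify the implementation against the RFC 6238 SHA-1 test vector (T = 59)
./scripts/bash/test-2fa-token.py -s GEZDGNBVGY3TQOJQGEZDGNBVGY3TQOJQ -t 59
# Expected output line: Time: ... | Token: 287082
```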

301
scripts/bash/validate-env.sh Executable file
View File

@@ -0,0 +1,301 @@
#!/bin/bash
# Validate environment configuration for AzerothCore RealmMaster
# Usage: ./scripts/bash/validate-env.sh [--strict] [--quiet]
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
ENV_FILE="$PROJECT_ROOT/.env"
TEMPLATE_FILE="$PROJECT_ROOT/.env.template"
# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
# Flags
STRICT_MODE=false
QUIET_MODE=false
EXIT_CODE=0
# Parse arguments
while [[ $# -gt 0 ]]; do
case "$1" in
--strict)
STRICT_MODE=true
shift
;;
--quiet)
QUIET_MODE=true
shift
;;
-h|--help)
cat <<EOF
Usage: $0 [OPTIONS]
Validates environment configuration for required variables.
OPTIONS:
--strict Fail on missing optional variables
--quiet Only show errors, suppress info/success messages
-h, --help Show this help
EXIT CODES:
0 - All required variables present
1 - Missing required variables
2 - Missing optional variables (only in --strict mode)
REQUIRED VARIABLES:
Project Configuration:
COMPOSE_PROJECT_NAME - Project name for containers/images
NETWORK_NAME - Docker network name
Repository Configuration:
ACORE_REPO_STANDARD - Standard AzerothCore repository URL
ACORE_BRANCH_STANDARD - Standard AzerothCore branch name
ACORE_REPO_PLAYERBOTS - Playerbots repository URL
ACORE_BRANCH_PLAYERBOTS - Playerbots branch name
Storage Paths:
STORAGE_PATH - Main storage path
STORAGE_PATH_LOCAL - Local storage path
Database Configuration:
MYSQL_ROOT_PASSWORD - MySQL root password
MYSQL_USER - MySQL user (typically root)
MYSQL_PORT - MySQL port (typically 3306)
MYSQL_HOST - MySQL hostname
DB_AUTH_NAME - Auth database name
DB_WORLD_NAME - World database name
DB_CHARACTERS_NAME - Characters database name
DB_PLAYERBOTS_NAME - Playerbots database name
Container Configuration:
CONTAINER_MYSQL - MySQL container name
CONTAINER_USER - Container user (format: uid:gid)
OPTIONAL VARIABLES (checked with --strict):
MySQL Performance:
MYSQL_INNODB_BUFFER_POOL_SIZE - InnoDB buffer pool size
MYSQL_INNODB_LOG_FILE_SIZE - InnoDB log file size
MYSQL_INNODB_REDO_LOG_CAPACITY - InnoDB redo log capacity
Database Connection:
DB_RECONNECT_SECONDS - Database reconnection delay
DB_RECONNECT_ATTEMPTS - Database reconnection attempts
Build Configuration:
MODULES_REBUILD_SOURCE_PATH - Path to source for module builds
Backup Configuration:
BACKUP_PATH - Backup storage path
BACKUP_RETENTION_DAYS - Daily backup retention
BACKUP_RETENTION_HOURS - Hourly backup retention
Image Configuration:
AC_AUTHSERVER_IMAGE - Auth server Docker image
AC_WORLDSERVER_IMAGE - World server Docker image
AC_DB_IMPORT_IMAGE - Database import Docker image
EXAMPLES:
$0 # Basic validation
$0 --strict # Strict validation (check optional vars)
$0 --quiet # Only show errors
EOF
exit 0
;;
*)
echo -e "${RED}Unknown option: $1${NC}" >&2
exit 1
;;
esac
done
log_info() {
$QUIET_MODE || echo -e "${BLUE} $*${NC}"
}
log_success() {
$QUIET_MODE || echo -e "${GREEN}$*${NC}"
}
log_warning() {
echo -e "${YELLOW}⚠️ $*${NC}" >&2
}
log_error() {
echo -e "${RED}$*${NC}" >&2
}
# Load environment
load_env() {
local file="$1"
if [[ ! -f "$file" ]]; then
return 1
fi
set -a
# shellcheck disable=SC1090
source "$file" 2>/dev/null || return 1
set +a
return 0
}
# Check if variable is set and non-empty
check_var() {
local var_name="$1"
local var_value="${!var_name:-}"
if [[ -z "$var_value" ]]; then
return 1
fi
return 0
}
# Validate required variables
validate_required() {
local missing=()
local required_vars=(
# Project Configuration
"COMPOSE_PROJECT_NAME"
"NETWORK_NAME"
# Repository Configuration
"ACORE_REPO_STANDARD"
"ACORE_BRANCH_STANDARD"
"ACORE_REPO_PLAYERBOTS"
"ACORE_BRANCH_PLAYERBOTS"
# Storage Paths
"STORAGE_PATH"
"STORAGE_PATH_LOCAL"
# Database Configuration
"MYSQL_ROOT_PASSWORD"
"MYSQL_USER"
"MYSQL_PORT"
"MYSQL_HOST"
"DB_AUTH_NAME"
"DB_WORLD_NAME"
"DB_CHARACTERS_NAME"
"DB_PLAYERBOTS_NAME"
# Container Configuration
"CONTAINER_MYSQL"
"CONTAINER_USER"
)
log_info "Checking required variables..."
for var in "${required_vars[@]}"; do
if check_var "$var"; then
log_success "$var=${!var}"
else
log_error "$var is not set"
missing+=("$var")
fi
done
if [[ ${#missing[@]} -gt 0 ]]; then
log_error "Missing required variables: ${missing[*]}"
return 1
fi
log_success "All required variables are set"
return 0
}
# Validate optional variables (strict mode)
validate_optional() {
local missing=()
local optional_vars=(
# MySQL Performance Tuning
"MYSQL_INNODB_BUFFER_POOL_SIZE"
"MYSQL_INNODB_LOG_FILE_SIZE"
"MYSQL_INNODB_REDO_LOG_CAPACITY"
# Database Connection Settings
"DB_RECONNECT_SECONDS"
"DB_RECONNECT_ATTEMPTS"
# Build Configuration
"MODULES_REBUILD_SOURCE_PATH"
# Backup Configuration
"BACKUP_PATH"
"BACKUP_RETENTION_DAYS"
"BACKUP_RETENTION_HOURS"
# Image Configuration
"AC_AUTHSERVER_IMAGE"
"AC_WORLDSERVER_IMAGE"
"AC_DB_IMPORT_IMAGE"
)
log_info "Checking optional variables..."
for var in "${optional_vars[@]}"; do
if check_var "$var"; then
log_success "$var is set"
else
log_warning "$var is not set (using default)"
missing+=("$var")
fi
done
if [[ ${#missing[@]} -gt 0 ]]; then
log_warning "Optional variables not set: ${missing[*]}"
return 2
fi
log_success "All optional variables are set"
return 0
}
# Main validation
main() {
log_info "Validating environment configuration..."
echo ""
# Check if .env exists
if [[ ! -f "$ENV_FILE" ]]; then
log_error ".env file not found at $ENV_FILE"
log_info "Copy .env.template to .env and configure it:"
log_info " cp $TEMPLATE_FILE $ENV_FILE"
exit 1
fi
# Load environment
if ! load_env "$ENV_FILE"; then
log_error "Failed to load $ENV_FILE"
exit 1
fi
log_success "Loaded environment from $ENV_FILE"
echo ""
# Validate required variables
if ! validate_required; then
EXIT_CODE=1
fi
echo ""
# Validate optional variables if strict mode
if $STRICT_MODE; then
if ! validate_optional; then
[[ $EXIT_CODE -eq 0 ]] && EXIT_CODE=2
fi
echo ""
fi
# Final summary
if [[ $EXIT_CODE -eq 0 ]]; then
log_success "Environment validation passed ✨"
elif [[ $EXIT_CODE -eq 1 ]]; then
log_error "Environment validation failed (missing required variables)"
elif [[ $EXIT_CODE -eq 2 ]]; then
log_warning "Environment validation passed with warnings (missing optional variables)"
fi
exit $EXIT_CODE
}
main "$@"
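
Because the validator distinguishes its documented exit codes (0, 1, 2), callers can treat missing optional variables as a soft warning. A hedged wrapper sketch, not part of the repository:

```bash
# Sketch only: wrap the documented exit codes (0 ok, 1 required missing, 2 optional missing)
./scripts/bash/validate-env.sh --strict
case $? in
  0) echo "Environment OK" ;;
  2) echo "Environment OK (optional variables missing)" ;;
  *) echo "Environment invalid" >&2; exit 1 ;;
esac
```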

View File

@@ -1,4 +1,4 @@
-module acore-compose/statusdash
+module azerothcore-realmmaster/statusdash
go 1.22

View File

@@ -41,9 +41,68 @@ Reads patch definitions from module metadata.
## Module-Specific Hooks
-Module-specific hooks are named after their primary module:
-- `mod-ale-patches` - Apply mod-ale compatibility fixes
-- `black-market-setup` - Black Market specific setup
+Module-specific hooks are named after their primary module and handle unique setup requirements.
+### `mod-ale-patches`
Applies compatibility patches for mod-ale (ALE - AzerothCore Lua Engine, formerly Eluna) when building with the AzerothCore playerbots fork.
**Auto-Detection:**
The hook automatically detects if you're building with the playerbots fork by checking:
1. `STACK_SOURCE_VARIANT=playerbots` environment variable
2. `MODULES_REBUILD_SOURCE_PATH` contains "azerothcore-playerbots"
**Patches Applied:**
#### SendTrainerList Compatibility Fix
**When Applied:** Automatically for playerbots fork (or when `APPLY_SENDTRAINERLIST_PATCH=1`)
**What it fixes:** Adds missing `GetGUID()` call to fix trainer list display
**File:** `src/LuaEngine/methods/PlayerMethods.h`
**Change:**
```cpp
// Before (broken)
player->GetSession()->SendTrainerList(obj);
// After (fixed)
player->GetSession()->SendTrainerList(obj->GetGUID());
```
#### MovePath Compatibility Fix
**When Applied:** Only when explicitly enabled with `APPLY_MOVEPATH_PATCH=1` (disabled by default)
**What it fixes:** Updates deprecated waypoint movement API
**File:** `src/LuaEngine/methods/CreatureMethods.h`
**Change:**
```cpp
// Before (deprecated)
MoveWaypoint(creature->GetWaypointPath(), true);
// After (updated API)
MovePath(creature->GetWaypointPath(), FORCED_MOVEMENT_RUN);
```
**Note:** Currently disabled by default as testing shows it's not required for normal operation.
**Feature Flags:**
```bash
# Automatically set for playerbots fork
APPLY_SENDTRAINERLIST_PATCH=1
# Disabled by default - enable if needed
APPLY_MOVEPATH_PATCH=0
```
**Debug Output:**
The hook provides detailed debug information during builds:
```
🔧 mod-ale-patches: Applying playerbots fork compatibility fixes to mod-ale
✅ Playerbots detected via MODULES_REBUILD_SOURCE_PATH
✅ Applied SendTrainerList compatibility fix
✅ Applied 1 compatibility patch(es)
```
**Why This Exists:**
The playerbots fork has slightly different API signatures in certain WorldSession methods. These patches ensure mod-ale (Eluna) compiles and functions correctly with both standard AzerothCore and the playerbots fork.
### `black-market-setup`
Black Market specific setup tasks.
## Usage in Manifest

View File

@@ -1,5 +1,6 @@
#!/bin/bash
# Module-specific hook for mod-ale compatibility patches
+# NOTE: These patches are primarily needed for the AzerothCore playerbots fork
set -e
# Hook environment
@@ -7,12 +8,44 @@ MODULE_KEY="${MODULE_KEY:-}"
MODULE_DIR="${MODULE_DIR:-}"
MODULE_NAME="${MODULE_NAME:-}"
# Detect if we're building with playerbots fork
IS_PLAYERBOTS_FORK=0
# Method 1: Check STACK_SOURCE_VARIANT environment variable
if [ "${STACK_SOURCE_VARIANT:-}" = "playerbots" ]; then
IS_PLAYERBOTS_FORK=1
echo " ✅ Playerbots detected via STACK_SOURCE_VARIANT"
# Method 2: Check MODULES_REBUILD_SOURCE_PATH
elif [ -n "${MODULES_REBUILD_SOURCE_PATH:-}" ] && echo "${MODULES_REBUILD_SOURCE_PATH}" | grep -q "azerothcore-playerbots"; then
IS_PLAYERBOTS_FORK=1
echo " ✅ Playerbots detected via MODULES_REBUILD_SOURCE_PATH"
else
echo " ❌ Playerbots fork not detected"
echo " 🔍 Debug: STACK_SOURCE_VARIANT='${STACK_SOURCE_VARIANT:-}'"
echo " 🔍 Debug: MODULES_REBUILD_SOURCE_PATH='${MODULES_REBUILD_SOURCE_PATH:-}'"
fi
# Feature flags (set to 0 to disable specific patches)
APPLY_MOVEPATH_PATCH="${APPLY_MOVEPATH_PATCH:-0}" # Disabled by default - appears unnecessary
# SendTrainerList patch: auto-detect based on fork, but can be overridden
if [ -z "${APPLY_SENDTRAINERLIST_PATCH:-}" ]; then
APPLY_SENDTRAINERLIST_PATCH="$IS_PLAYERBOTS_FORK" # Only needed for playerbots fork
else
APPLY_SENDTRAINERLIST_PATCH="${APPLY_SENDTRAINERLIST_PATCH}"
fi
# Override keyword patch: always apply (C++11 best practice)
APPLY_OVERRIDE_PATCH="${APPLY_OVERRIDE_PATCH:-1}"
if [ -z "$MODULE_DIR" ] || [ ! -d "$MODULE_DIR" ]; then
  echo "❌ mod-ale-patches: Invalid module directory: $MODULE_DIR"
  exit 2
fi
-echo "🔧 mod-ale-patches: Applying compatibility fixes to $MODULE_NAME"
+if [ "$IS_PLAYERBOTS_FORK" = "1" ]; then
+  echo "🔧 mod-ale-patches: Applying playerbots fork compatibility fixes to $MODULE_NAME"
+else
+  echo "🔧 mod-ale-patches: Checking compatibility fixes for $MODULE_NAME"
+fi
# Apply MovePath compatibility patch
apply_movepath_patch() {
@@ -37,10 +70,85 @@ apply_movepath_patch() {
  fi
}
# Apply override keyword patch
apply_override_patch() {
local found_files=()
# Search for .cpp and .h files that need override keyword
while IFS= read -r -d '' file; do
if grep -l 'void OnPlayerLogin(Player\* player)' "$file" >/dev/null 2>&1; then
found_files+=("$file")
fi
done < <(find "$MODULE_DIR" -name "*.cpp" -o -name "*.h" -print0)
if [ ${#found_files[@]} -eq 0 ]; then
echo " ✅ No files need override keyword fix"
return 0
fi
local patch_count=0
for file in "${found_files[@]}"; do
# Check if OnPlayerLogin exists without override keyword
if grep -q 'void OnPlayerLogin(Player\* player) {' "$file" && ! grep -q 'void OnPlayerLogin(Player\* player) override {' "$file"; then
if sed -i 's/void OnPlayerLogin(Player\* player) {/void OnPlayerLogin(Player* player) override {/' "$file"; then
echo " ✅ Applied override keyword fix to $(basename "$file")"
patch_count=$((patch_count + 1))
else
echo " ❌ Failed to apply override keyword fix to $(basename "$file")"
return 2
fi
fi
done
if [ $patch_count -eq 0 ]; then
echo " ✅ Override keyword fix already present"
else
echo " ✅ Applied override keyword fix to $patch_count file(s)"
fi
return 0
}
# Apply SendTrainerList compatibility patch
apply_sendtrainerlist_patch() {
local target_file="$MODULE_DIR/src/LuaEngine/methods/PlayerMethods.h"
if [ ! -f "$target_file" ]; then
echo " ⚠️ SendTrainerList patch target file missing: $target_file"
return 1
fi
# Check if the buggy code exists (with ->GetGUID())
if grep -q 'player->GetSession()->SendTrainerList(obj->GetGUID());' "$target_file"; then
# Apply the fix by casting to Creature* instead of using GetGUID()
if sed -i 's/player->GetSession()->SendTrainerList(obj->GetGUID());/if (Creature* creature = obj->ToCreature()) player->GetSession()->SendTrainerList(creature);/' "$target_file"; then
echo " ✅ Applied SendTrainerList compatibility fix"
return 0
else
echo " ❌ Failed to apply SendTrainerList compatibility fix"
return 2
fi
else
echo " ✅ SendTrainerList compatibility fix already present"
return 0
fi
}
# Apply all patches
patch_count=0
-if apply_movepath_patch; then
-  patch_count=$((patch_count + 1))
-fi
+if [ "$APPLY_OVERRIDE_PATCH" = "1" ]; then
+  if apply_override_patch; then
+    patch_count=$((patch_count + 1))
+  fi
+fi
+if [ "$APPLY_MOVEPATH_PATCH" = "1" ]; then
+  if apply_movepath_patch; then
+    patch_count=$((patch_count + 1))
+  fi
+fi
+if [ "$APPLY_SENDTRAINERLIST_PATCH" = "1" ]; then
+  if apply_sendtrainerlist_patch; then
+    patch_count=$((patch_count + 1))
+  fi
+fi
if [ $patch_count -eq 0 ]; then

View File

@@ -0,0 +1,161 @@
#!/usr/bin/env python3
"""
Apply a module profile to .env file for CI/CD builds.
This script reads a module profile JSON and enables the specified modules
in the .env file, ready for automated builds.
"""
import argparse
import json
import sys
from pathlib import Path
from typing import List, Set
def load_profile(profile_path: Path) -> List[str]:
"""Load module list from a profile JSON file."""
try:
with open(profile_path, 'r') as f:
            data = json.load(f)
    except FileNotFoundError:
        print(f"ERROR: Profile not found: {profile_path}", file=sys.stderr)
        sys.exit(1)
    except json.JSONDecodeError as e:
        print(f"ERROR: Invalid JSON in profile: {e}", file=sys.stderr)
        sys.exit(1)

    modules = data.get('modules', [])
    if not isinstance(modules, list):
        print("ERROR: 'modules' must be a list in profile JSON", file=sys.stderr)
        sys.exit(1)

    return [m.strip() for m in modules if m.strip()]


def read_env_template(template_path: Path) -> List[str]:
    """Read the .env.template file."""
    try:
        with open(template_path, 'r') as f:
            return f.readlines()
    except FileNotFoundError:
        print(f"ERROR: Template not found: {template_path}", file=sys.stderr)
        sys.exit(1)


def apply_profile_to_env(template_lines: List[str], enabled_modules: Set[str]) -> List[str]:
    """
    Process template lines and enable specified modules.
    Sets MODULE_* variables to 1 if they're in enabled_modules, otherwise keeps template value.
    """
    output_lines = []
    for line in template_lines:
        stripped = line.strip()
        # Check if this is a MODULE_ variable line
        if stripped.startswith('MODULE_') and '=' in stripped:
            # Extract the module name (before the =)
            module_name = stripped.split('=')[0].strip()
            if module_name in enabled_modules:
                # Enable this module
                output_lines.append(f"{module_name}=1\n")
            else:
                # Keep original line (usually =0 or commented)
                output_lines.append(line)
        else:
            # Not a module line, keep as-is
            output_lines.append(line)
    return output_lines


def write_env_file(env_path: Path, lines: List[str]):
    """Write the processed lines to .env file."""
    try:
        with open(env_path, 'w') as f:
            f.writelines(lines)
        print(f"✅ Applied profile to {env_path}")
    except IOError as e:
        print(f"ERROR: Failed to write .env file: {e}", file=sys.stderr)
        sys.exit(1)


def main():
    parser = argparse.ArgumentParser(
        description='Apply a module profile to .env file for automated builds'
    )
    parser.add_argument(
        'profile',
        help='Name of the profile (e.g., RealmMaster) or path to profile JSON'
    )
    parser.add_argument(
        '--env-template',
        default='.env.template',
        help='Path to .env.template file (default: .env.template)'
    )
    parser.add_argument(
        '--env-output',
        default='.env',
        help='Path to output .env file (default: .env)'
    )
    parser.add_argument(
        '--profiles-dir',
        default='config/module-profiles',
        help='Directory containing profile JSON files (default: config/module-profiles)'
    )
    parser.add_argument(
        '--list-modules',
        action='store_true',
        help='List modules that will be enabled and exit'
    )
    args = parser.parse_args()

    # Resolve profile path
    profile_path = Path(args.profile)
    if not profile_path.exists():
        # Try treating it as a profile name
        profile_path = Path(args.profiles_dir) / f"{args.profile}.json"
        if not profile_path.exists():
            print(f"ERROR: Profile not found: {args.profile}", file=sys.stderr)
            print(f" Tried: {Path(args.profile)}", file=sys.stderr)
            print(f" Tried: {profile_path}", file=sys.stderr)
            sys.exit(1)

    # Load the profile
    print(f"📋 Loading profile: {profile_path.name}")
    enabled_modules = set(load_profile(profile_path))

    if args.list_modules:
        print(f"\nModules to be enabled ({len(enabled_modules)}):")
        for module in sorted(enabled_modules):
            print(f"{module}")
        return

    print(f"✓ Found {len(enabled_modules)} modules in profile")

    # Read template
    template_path = Path(args.env_template)
    template_lines = read_env_template(template_path)

    # Apply profile
    output_lines = apply_profile_to_env(template_lines, enabled_modules)

    # Write output
    env_path = Path(args.env_output)
    write_env_file(env_path, output_lines)

    print(f"✓ Profile '{profile_path.stem}' applied successfully")
    print(f"\nEnabled modules:")
    for module in sorted(enabled_modules)[:10]:  # Show first 10
        print(f"{module}")
    if len(enabled_modules) > 10:
        print(f" ... and {len(enabled_modules) - 10} more")


if __name__ == '__main__':
    main()
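For reference, a quick usage sketch of this helper (the file path scripts/python/apply-module-profile.py is assumed for illustration; the flags themselves come from the argparse definitions above):

    # Preview which MODULE_* keys the profile would enable, without writing anything
    python3 scripts/python/apply-module-profile.py RealmMaster --list-modules

    # Apply the profile: read .env.template and write .env with the profile's modules set to 1
    python3 scripts/python/apply-module-profile.py RealmMaster --env-template .env.template --env-output .env

The positional argument is resolved either as a literal path to a profile JSON or as <name>.json under config/module-profiles/ (the --profiles-dir default).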


@@ -371,12 +371,7 @@ def build_state(env_path: Path, manifest_path: Path) -> ModuleCollectionState:
     for unknown_key in extra_env_modules:
         warnings.append(f".env defines {unknown_key} but it is missing from the manifest")
-    # Warn if manifest entry lacks .env toggle
-    for module in modules:
-        if module.key not in env_map and module.key not in os.environ:
-            warnings.append(
-                f"Manifest includes {module.key} but .env does not define it (defaulting to 0)"
-            )
+    # Skip warnings for missing modules - they default to disabled (0) as intended
     return ModuleCollectionState(
         manifest_path=manifest_path,


@@ -50,6 +50,9 @@ def clean(value: str) -> str:
 def cmd_keys(manifest_path: str) -> None:
     manifest = load_manifest(manifest_path)
     for entry in iter_modules(manifest):
+        # Skip blocked modules
+        if entry.get("status") == "blocked":
+            continue
         print(entry["key"])

@@ -96,7 +99,7 @@ def cmd_metadata(manifest_path: str) -> None:
 def cmd_sorted_keys(manifest_path: str) -> None:
     manifest = load_manifest(manifest_path)
-    modules = list(iter_modules(manifest))
+    modules = [entry for entry in iter_modules(manifest) if entry.get("status") != "blocked"]
     modules.sort(
         key=lambda item: (
             # Primary sort by order (default to 5000 if not specified)


@@ -28,8 +28,9 @@ def normalize_modules(raw_modules: Iterable[str], profile: Path) -> List[str]:
         if not value:
             continue
         modules.append(value)
-    if not modules:
-        raise ValueError(f"Profile {profile.name}: modules list cannot be empty")
+    # Allow empty modules list for vanilla/minimal profiles
+    if not modules and "vanilla" not in profile.stem.lower() and "minimal" not in profile.stem.lower():
+        raise ValueError(f"Profile {profile.name}: modules list cannot be empty (except for vanilla/minimal profiles)")
     return modules

@@ -79,7 +80,7 @@ def cmd_list(directory: Path) -> int:
     profiles.sort(key=lambda item: item[4])
     for name, modules, label, description, order in profiles:
-        modules_csv = ",".join(modules)
+        modules_csv = ",".join(modules) if modules else "-"
         print("\t".join([name, modules_csv, label, description, str(order)]))
     return 0
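As an illustration of the relaxed check, a minimal profile with an empty modules list might look like the sketch below; the "modules" key matches what the profile loader reads, while the other field names are assumptions based on the name/label/description/order columns printed by cmd_list:

    # Hypothetical vanilla profile that normalize_modules now accepts
    cat > config/module-profiles/vanilla.json <<'JSON'
    {
      "label": "Vanilla",
      "description": "Pure AzerothCore with no modules",
      "order": 10,
      "modules": []
    }
    JSON

Because the file stem contains "vanilla", the empty modules list no longer raises, and cmd_list would print "-" in its modules column for this profile.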


@@ -18,6 +18,7 @@ import re
 import sys
 import time
 from dataclasses import dataclass
+from pathlib import Path
 from typing import Dict, Iterable, List, Optional, Sequence
 from urllib import error, parse, request

@@ -45,7 +46,7 @@ CATEGORY_BY_TYPE = {
     "data": "data",
     "cpp": "uncategorized",
 }
-USER_AGENT = "acore-compose-module-manifest"
+USER_AGENT = "azerothcore-realmmaster-module-manifest"

 def parse_args(argv: Sequence[str]) -> argparse.Namespace:

@@ -87,6 +88,16 @@ def parse_args(argv: Sequence[str]) -> argparse.Namespace:
         action="store_true",
         help="Print verbose progress information",
     )
+    parser.add_argument(
+        "--update-template",
+        default=".env.template",
+        help="Update .env.template with missing module variables (default: %(default)s)",
+    )
+    parser.add_argument(
+        "--skip-template",
+        action="store_true",
+        help="Skip updating .env.template",
+    )
     return parser.parse_args(argv)

@@ -273,6 +284,117 @@ def collect_repositories(
     return list(seen.values())

+def update_env_template(manifest_path: str, template_path: str) -> bool:
+    """Update .env.template with module variables for active modules only.
+
+    Args:
+        manifest_path: Path to the module manifest JSON file
+        template_path: Path to .env.template file
+
+    Returns:
+        True if template was updated, False if no changes needed
+    """
+    # Load manifest to get all module keys
+    manifest = load_manifest(manifest_path)
+    modules = manifest.get("modules", [])
+    if not modules:
+        return False
+
+    # Extract only active module keys
+    active_module_keys = set()
+    disabled_module_keys = set()
+    for module in modules:
+        key = module.get("key")
+        status = module.get("status", "active")
+        if key:
+            if status == "active":
+                active_module_keys.add(key)
+            else:
+                disabled_module_keys.add(key)
+    if not active_module_keys and not disabled_module_keys:
+        return False
+
+    # Check if template file exists
+    template_file = Path(template_path)
+    if not template_file.exists():
+        print(f"Warning: .env.template not found at {template_path}")
+        return False
+
+    # Read current template content
+    try:
+        current_content = template_file.read_text(encoding="utf-8")
+        current_lines = current_content.splitlines()
+    except Exception as exc:
+        print(f"Error reading .env.template: {exc}")
+        return False
+
+    # Find which module variables are currently in the template
+    existing_vars = set()
+    current_module_lines = []
+    non_module_lines = []
+    for line in current_lines:
+        stripped = line.strip()
+        if "=" in stripped and not stripped.startswith("#"):
+            var_name = stripped.split("=", 1)[0].strip()
+            if var_name.startswith("MODULE_"):
+                existing_vars.add(var_name)
+                current_module_lines.append((var_name, line))
+            else:
+                non_module_lines.append(line)
+        else:
+            non_module_lines.append(line)
+
+    # Determine what needs to change
+    missing_vars = active_module_keys - existing_vars
+    vars_to_remove = disabled_module_keys & existing_vars
+    vars_to_keep = active_module_keys & existing_vars
+    changes_made = False
+
+    # Report what will be done
+    if missing_vars:
+        print(f"📝 Adding {len(missing_vars)} active module variable(s) to .env.template:")
+        for var in sorted(missing_vars):
+            print(f" + {var}=0")
+        changes_made = True
+    if vars_to_remove:
+        print(f"🗑️ Removing {len(vars_to_remove)} disabled module variable(s) from .env.template:")
+        for var in sorted(vars_to_remove):
+            print(f" - {var}")
+        changes_made = True
+    if not changes_made:
+        print("✅ .env.template is up to date with active modules")
+        return False
+
+    # Build new content: non-module lines + active module lines
+    new_lines = non_module_lines[:]
+    # Add existing active module variables (preserve their current values)
+    for var_name, original_line in current_module_lines:
+        if var_name in vars_to_keep:
+            new_lines.append(original_line)
+    # Add new active module variables
+    for var in sorted(missing_vars):
+        new_lines.append(f"{var}=0")
+
+    # Write updated content
+    try:
+        new_content = "\n".join(new_lines) + "\n"
+        template_file.write_text(new_content, encoding="utf-8")
+        print("✅ .env.template updated successfully")
+        print(f" Active modules: {len(active_module_keys)}")
+        print(f" Disabled modules removed: {len(vars_to_remove)}")
+        return True
+    except Exception as exc:
+        print(f"Error writing .env.template: {exc}")
+        return False
+
 def main(argv: Sequence[str]) -> int:
     args = parse_args(argv)
     topics = args.topics or DEFAULT_TOPICS

@@ -291,6 +413,13 @@ def main(argv: Sequence[str]) -> int:
         handle.write("\n")
     print(f"Updated manifest {args.manifest}: added {added}, refreshed {updated}")
+
+    # Update .env.template if requested (always run to clean up disabled modules)
+    if not args.skip_template:
+        template_updated = update_env_template(args.manifest, args.update_template)
+        if template_updated:
+            print(f"Updated {args.update_template} with active modules only")
+
     return 0
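A hedged invocation sketch for the new template-sync behaviour (the manifest-sync script's filename is not shown in this diff, so the path below is only a placeholder):

    # Refresh the manifest and reconcile MODULE_* variables in .env.template (default behaviour)
    python3 scripts/python/sync-module-manifest.py --update-template .env.template

    # Refresh the manifest only, leaving .env.template untouched
    python3 scripts/python/sync-module-manifest.py --skip-template

Only the --update-template and --skip-template flags are taken from the parse_args additions above.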

275
setup.sh
View File

@@ -3,9 +3,9 @@ set -e
 clear

 # ==============================================
-# azerothcore-rm - Interactive .env generator
+# AzerothCore-RealmMaster - Interactive .env generator
 # ==============================================
-# Mirrors options from scripts/setup-server.sh but targets azerothcore-rm/.env
+# Mirrors options from scripts/setup-server.sh but targets .env

 # Get script directory for template reading
 SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
@@ -16,6 +16,12 @@ TEMPLATE_FILE="$SCRIPT_DIR/.env.template"
 source "$SCRIPT_DIR/scripts/bash/project_name.sh"
 DEFAULT_PROJECT_NAME="$(project_name::resolve "$ENV_FILE" "$TEMPLATE_FILE")"

+# ==============================================
+# Feature Flags
+# ==============================================
+# Set to 0 to disable server configuration preset selection
+ENABLE_CONFIG_PRESETS="${ENABLE_CONFIG_PRESETS:-0}"
+
 # ==============================================
 # Constants (auto-loaded from .env.template)
 # ==============================================
@@ -179,7 +185,7 @@ readonly DEFAULT_DOMAIN_PLACEHOLDER="your-domain.com"
 # Module preset names (not in template)
 readonly DEFAULT_PRESET_SUGGESTED="suggested-modules"
-readonly DEFAULT_PRESET_PLAYERBOTS="playerbots-suggested-modules"
+readonly DEFAULT_PRESET_PLAYERBOTS="suggested-modules-playerbots"

 # Health check configuration (loaded via loop)
 readonly -a HEALTHCHECK_KEYS=(
@@ -331,25 +337,57 @@ show_wow_header() {
   echo -e "${RED}"
   cat <<'EOF'
-[previous banner: the "AZEROTHCORE" ASCII text logo above a row of small ASCII crest figures, credited "art: littlebitspace@https://littlebitspace.com/"]
+[new banner: a large ASCII artwork followed by the same "AZEROTHCORE" ASCII text logo]
 EOF
   echo -e "${NC}"
 }
@@ -591,7 +629,7 @@ main(){
 Usage: ./setup.sh [options]

 Description:
-  Interactive wizard that generates azerothcore-rm/.env for the
+  Interactive wizard that generates .env for the
   profiles-based compose. Prompts for deployment type, ports, storage,
   MySQL credentials, backup retention, and module presets or manual
   toggles.
@@ -809,7 +847,7 @@ EOF
 fi

 show_wow_header
-say INFO "This will create azerothcore-rm/.env for compose profiles."
+say INFO "This will create .env for compose profiles."

 # Deployment type
 say HEADER "DEPLOYMENT TYPE"
@@ -963,58 +1001,65 @@ fi
 BACKUP_DAILY_TIME=$(ask "Daily backup hour (00-23, UTC)" "${CLI_BACKUP_TIME:-$DEFAULT_BACKUP_TIME}" validate_number)

 # Server configuration
-say HEADER "SERVER CONFIGURATION PRESET"
 local SERVER_CONFIG_PRESET
+if [ "$ENABLE_CONFIG_PRESETS" = "1" ]; then
+  say HEADER "SERVER CONFIGURATION PRESET"
   if [ -n "$CLI_CONFIG_PRESET" ]; then
     SERVER_CONFIG_PRESET="$CLI_CONFIG_PRESET"
     say INFO "Using preset from command line: $SERVER_CONFIG_PRESET"
   else
     declare -A CONFIG_PRESET_NAMES=()
     declare -A CONFIG_PRESET_DESCRIPTIONS=()
     declare -A CONFIG_MENU_INDEX=()
     local config_dir="$SCRIPT_DIR/config/presets"
     local menu_index=1
     echo "Choose a server configuration preset:"
     if [ -x "$SCRIPT_DIR/scripts/python/parse-config-presets.py" ] && [ -d "$config_dir" ]; then
       while IFS=$'\t' read -r preset_key preset_name preset_desc; do
         [ -n "$preset_key" ] || continue
         CONFIG_PRESET_NAMES["$preset_key"]="$preset_name"
         CONFIG_PRESET_DESCRIPTIONS["$preset_key"]="$preset_desc"
         CONFIG_MENU_INDEX[$menu_index]="$preset_key"
         echo "$menu_index) $preset_name"
         echo " $preset_desc"
         menu_index=$((menu_index + 1))
       done < <(python3 "$SCRIPT_DIR/scripts/python/parse-config-presets.py" list --presets-dir "$config_dir")
     else
       # Fallback if parser script not available
       CONFIG_MENU_INDEX[1]="none"
       CONFIG_PRESET_NAMES["none"]="Default (No Preset)"
       CONFIG_PRESET_DESCRIPTIONS["none"]="Use default AzerothCore settings"
       echo "1) Default (No Preset)"
       echo " Use default AzerothCore settings without any modifications"
     fi
     local max_config_option=$((menu_index - 1))
     if [ "$NON_INTERACTIVE" = "1" ]; then
       SERVER_CONFIG_PRESET="none"
       say INFO "Non-interactive mode: Using default configuration preset"
     else
       while true; do
         read -p "$(echo -e "${YELLOW}🎯 Select server configuration [1-$max_config_option]: ${NC}")" choice
         if [[ "$choice" =~ ^[0-9]+$ ]] && [ "$choice" -ge 1 ] && [ "$choice" -le "$max_config_option" ]; then
           SERVER_CONFIG_PRESET="${CONFIG_MENU_INDEX[$choice]}"
           local chosen_name="${CONFIG_PRESET_NAMES[$SERVER_CONFIG_PRESET]}"
           say INFO "Selected: $chosen_name"
           break
         else
           say ERROR "Please select a number between 1 and $max_config_option"
         fi
       done
     fi
   fi
+else
+  # Config presets disabled - use default
+  SERVER_CONFIG_PRESET="none"
+  say INFO "Server configuration presets disabled - using default settings"
 fi

 local MODE_SELECTION=""
@@ -1094,12 +1139,29 @@ fi
   MODE_PRESET_NAME="$CLI_MODULE_PRESET"
 fi

+# Function to determine source branch for a preset
+get_preset_source_branch() {
+  local preset_name="$1"
+  local preset_modules="${MODULE_PRESET_CONFIGS[$preset_name]:-}"
+
+  # Check if playerbots module is in the preset
+  if [[ "$preset_modules" == *"MODULE_PLAYERBOTS"* ]]; then
+    echo "azerothcore-playerbots"
+  else
+    echo "azerothcore-wotlk"
+  fi
+}
+
 # Module config
 say HEADER "MODULE PRESET"
-echo "1) ${MODULE_PRESET_LABELS[$DEFAULT_PRESET_SUGGESTED]:-⭐ Suggested Modules}"
-echo "2) ${MODULE_PRESET_LABELS[$DEFAULT_PRESET_PLAYERBOTS]:-🤖 Playerbots + Suggested modules}"
-echo "3) ⚙️ Manual selection"
-echo "4) 🚫 No modules"
+printf " %s) %s\n" "1" "⭐ Suggested Modules"
+printf " %s (%s)\n" "Baseline solo-friendly quality of life mix" "azerothcore-wotlk"
+printf " %s) %s\n" "2" "🤖 Playerbots + Suggested modules"
+printf " %s (%s)\n" "Suggested stack plus playerbots enabled" "azerothcore-playerbots"
+printf " %s) %s\n" "3" "⚙️ Manual selection"
+printf " %s (%s)\n" "Choose individual modules manually" "(depends on modules)"
+printf " %s) %s\n" "4" "🚫 No modules"
+printf " %s (%s)\n" "Pure AzerothCore with no modules" "azerothcore-wotlk"

 local menu_index=5
 declare -A MENU_PRESET_INDEX=()
@@ -1118,13 +1180,16 @@ fi
 for entry in "${ORDERED_PRESETS[@]}"; do
   local preset_name="${entry#*::}"
   [ -n "${MODULE_PRESET_CONFIGS[$preset_name]:-}" ] || continue
-  local pretty_name
+  local pretty_name preset_desc
   if [ -n "${MODULE_PRESET_LABELS[$preset_name]:-}" ]; then
     pretty_name="${MODULE_PRESET_LABELS[$preset_name]}"
   else
     pretty_name=$(echo "$preset_name" | tr '_-' ' ' | awk '{for(i=1;i<=NF;i++){$i=toupper(substr($i,1,1)) substr($i,2)}}1')
   fi
-  echo "${menu_index}) ${pretty_name} (config/module-profiles/${preset_name}.json)"
+  preset_desc="${MODULE_PRESET_DESCRIPTIONS[$preset_name]:-No description available}"
+  local source_branch=$(get_preset_source_branch "$preset_name")
+  printf " %s) %s\n" "$menu_index" "$pretty_name"
+  printf " %s (%s)\n" "$preset_desc" "$source_branch"
   MENU_PRESET_INDEX[$menu_index]="$preset_name"
   menu_index=$((menu_index + 1))
 done
@@ -1422,11 +1487,16 @@ fi
   MODULES_CPP_LIST="$(IFS=','; printf '%s' "${enabled_cpp_module_keys[*]}")"
 fi

-local STACK_IMAGE_MODE="standard"
+# Determine source variant based ONLY on playerbots module
 local STACK_SOURCE_VARIANT="core"
 if [ "$MODULE_PLAYERBOTS" = "1" ] || [ "$PLAYERBOT_ENABLED" = "1" ]; then
-  STACK_IMAGE_MODE="playerbots"
   STACK_SOURCE_VARIANT="playerbots"
+fi
+
+# Determine image mode based on source variant and build requirements
+local STACK_IMAGE_MODE="standard"
+if [ "$STACK_SOURCE_VARIANT" = "playerbots" ]; then
+  STACK_IMAGE_MODE="playerbots"
 elif [ "$NEEDS_CXX_REBUILD" = "1" ]; then
   STACK_IMAGE_MODE="modules"
 fi
@@ -1522,7 +1592,7 @@ fi
 fi

 local default_source_rel="${LOCAL_STORAGE_ROOT}/source/azerothcore"
-if [ "$NEEDS_CXX_REBUILD" = "1" ] || [ "$MODULE_PLAYERBOTS" = "1" ]; then
+if [ "$STACK_SOURCE_VARIANT" = "playerbots" ]; then
   default_source_rel="${LOCAL_STORAGE_ROOT}/source/azerothcore-playerbots"
 fi
@@ -1604,7 +1674,7 @@ fi
 {
 cat <<EOF
-# Generated by azerothcore-rm/setup.sh
+# Generated by setup.sh

 # Compose overrides (set to 1 to include matching file under compose-overrides/)
 # mysql-expose.yml -> exposes MySQL externally via COMPOSE_OVERRIDE_MYSQL_EXPOSE_ENABLED
@@ -1616,6 +1686,15 @@ COMPOSE_PROJECT_NAME=$DEFAULT_COMPOSE_PROJECT_NAME
 STORAGE_PATH=$STORAGE_PATH
 STORAGE_PATH_LOCAL=$LOCAL_STORAGE_ROOT
+STORAGE_CONFIG_PATH=$(get_template_value "STORAGE_CONFIG_PATH")
+STORAGE_LOGS_PATH=$(get_template_value "STORAGE_LOGS_PATH")
+STORAGE_MODULES_PATH=$(get_template_value "STORAGE_MODULES_PATH")
+STORAGE_LUA_SCRIPTS_PATH=$(get_template_value "STORAGE_LUA_SCRIPTS_PATH")
+STORAGE_MODULES_META_PATH=$(get_template_value "STORAGE_MODULES_META_PATH")
+STORAGE_MODULE_SQL_PATH=$(get_template_value "STORAGE_MODULE_SQL_PATH")
+STORAGE_INSTALL_MARKERS_PATH=$(get_template_value "STORAGE_INSTALL_MARKERS_PATH")
+STORAGE_CLIENT_DATA_PATH=$(get_template_value "STORAGE_CLIENT_DATA_PATH")
+STORAGE_LOCAL_SOURCE_PATH=$(get_template_value "STORAGE_LOCAL_SOURCE_PATH")
 BACKUP_PATH=$BACKUP_PATH
 TZ=$DEFAULT_TZ
@@ -1684,10 +1763,31 @@ CONTAINER_USER=$CONTAINER_USER
 CONTAINER_MYSQL=$DEFAULT_CONTAINER_MYSQL
 CONTAINER_DB_IMPORT=$DEFAULT_CONTAINER_DB_IMPORT
 CONTAINER_DB_INIT=$DEFAULT_CONTAINER_DB_INIT
+CONTAINER_DB_GUARD=$(get_template_value "CONTAINER_DB_GUARD")
 CONTAINER_BACKUP=$DEFAULT_CONTAINER_BACKUP
 CONTAINER_MODULES=$DEFAULT_CONTAINER_MODULES
 CONTAINER_POST_INSTALL=$DEFAULT_CONTAINER_POST_INSTALL

+# Database Guard Defaults
+DB_GUARD_RECHECK_SECONDS=$(get_template_value "DB_GUARD_RECHECK_SECONDS")
+DB_GUARD_RETRY_SECONDS=$(get_template_value "DB_GUARD_RETRY_SECONDS")
+DB_GUARD_WAIT_ATTEMPTS=$(get_template_value "DB_GUARD_WAIT_ATTEMPTS")
+DB_GUARD_HEALTH_MAX_AGE=$(get_template_value "DB_GUARD_HEALTH_MAX_AGE")
+DB_GUARD_HEALTHCHECK_INTERVAL=$(get_template_value "DB_GUARD_HEALTHCHECK_INTERVAL")
+DB_GUARD_HEALTHCHECK_TIMEOUT=$(get_template_value "DB_GUARD_HEALTHCHECK_TIMEOUT")
+DB_GUARD_HEALTHCHECK_RETRIES=$(get_template_value "DB_GUARD_HEALTHCHECK_RETRIES")
+DB_GUARD_VERIFY_INTERVAL_SECONDS=$(get_template_value "DB_GUARD_VERIFY_INTERVAL_SECONDS")
+
+# Module SQL staging
+STAGE_PATH_MODULE_SQL=$(get_template_value "STAGE_PATH_MODULE_SQL")
+
+# Modules rebuild source path
+MODULES_REBUILD_SOURCE_PATH=$MODULES_REBUILD_SOURCE_PATH
+
+# SQL Source Overlay
+SOURCE_DIR=$(get_template_value "SOURCE_DIR")
+AC_SQL_SOURCE_PATH=$(get_template_value "AC_SQL_SOURCE_PATH")
+
 # Ports
 AUTH_EXTERNAL_PORT=$AUTH_EXTERNAL_PORT
 AUTH_PORT=$DEFAULT_AUTH_INTERNAL_PORT
@@ -1704,6 +1804,8 @@ REALM_PORT=$REALM_PORT
 BACKUP_RETENTION_DAYS=$BACKUP_RETENTION_DAYS
 BACKUP_RETENTION_HOURS=$BACKUP_RETENTION_HOURS
 BACKUP_DAILY_TIME=$BACKUP_DAILY_TIME
+BACKUP_INTERVAL_MINUTES=$(get_template_value "BACKUP_INTERVAL_MINUTES")
+BACKUP_EXTRA_DATABASES=$(get_template_value "BACKUP_EXTRA_DATABASES")
 BACKUP_HEALTHCHECK_MAX_MINUTES=$BACKUP_HEALTHCHECK_MAX_MINUTES
 BACKUP_HEALTHCHECK_GRACE_SECONDS=$BACKUP_HEALTHCHECK_GRACE_SECONDS
@@ -1711,10 +1813,13 @@ EOF
 echo
 echo "# Modules"
 for module_key in "${MODULE_KEYS[@]}"; do
-  printf "%s=%s\n" "$module_key" "${!module_key:-0}"
+  local module_value="${!module_key:-0}"
+  # Only write enabled modules (value=1) to .env
+  if [ "$module_value" = "1" ]; then
+    printf "%s=%s\n" "$module_key" "$module_value"
+  fi
 done

 cat <<EOF
-MODULES_REBUILD_SOURCE_PATH=$MODULES_REBUILD_SOURCE_PATH

 # Client data
 CLIENT_DATA_VERSION=${CLIENT_DATA_VERSION:-$DEFAULT_CLIENT_DATA_VERSION}