13 Commits

Author / SHA1 / Message / (Date)

uprightbass360  c57c388d26  chore: sync module manifest  (2026-01-29 20:59:06 -05:00)
uprightbass360  6170c6e43b  feat: db migration support  (2026-01-29 18:17:53 -05:00)
uprightbass360  74bbc464ac  feat: quick little docker setup  (2026-01-26 16:43:26 -05:00)
uprightbass360  1edc675530  Merge branch 'main' of https://github.com/uprightbass360/AzerothCore-RelmMaster  (2026-01-26 12:48:59 -05:00)
uprightbass360  56769e81d5  fix: server startup hardening  (2026-01-26 12:48:07 -05:00)
uprightbass360  654d81bb38  chore: sync module manifest  (2026-01-26 12:31:47 -05:00)
uprightbass360  6a05fe785f  chore: add realmlist update for patching  (2026-01-23 15:02:45 -05:00)
uprightbass360  c46f25a1a9  chore: revise recommended mods  (2026-01-23 15:01:36 -05:00)
uprightbass360  517adc4225  chore: sync module manifest  (2026-01-22 22:05:23 -05:00)
uprightbass360  f4e88abde8  chore: sync module manifest  (2026-01-12 00:43:55 -05:00)
uprightbass360  305da352c0  fix: correct module list output in build summary  (2026-01-10 18:22:46 -05:00)
                The redirect operator was placed after '|| true', causing the module
                list to not be appended to GITHUB_STEP_SUMMARY. Moved the redirect
                before the fallback operator to ensure output is captured.
uprightbass360  61436263ad  fix: resolve YAML syntax errors in create-release workflow  (2026-01-10 18:16:29 -05:00)
                Replaced heredocs with echo statements to avoid YAML parsing issues.
                The YAML parser was interpreting markdown headings and other content
                within heredocs as YAML syntax, causing validation errors.

                Using grouped echo statements ({ echo ...; } > file) works correctly
                with GitHub Actions YAML parser while maintaining variable expansion.

                Also updated documentation to use generic placeholder (your-dockerhub-username)
                in examples instead of hardcoded username.
uprightbass360  61137b5510  adds workflow build for RealmMaster profile  (2026-01-10 17:09:22 -05:00)
19 changed files with 1205 additions and 36 deletions

View File

@@ -29,10 +29,15 @@ COMPOSE_PROJECT_NAME=azerothcore-realmmaster
# Module Profile Selection
# =====================
# Choose which module profile build to use:
-# - realmmaster: 32 modules (playerbots, transmog, solo-lfg, eluna, etc.) - Recommended
+# - realmmaster: 32 modules (playerbots, transmog, solo-lfg, eluna, etc.) - ✅ Available now (Recommended)
#
# Additional profiles (available soon - will be built on demand):
# - suggested-modules: Alternative suggested module set
# - all-modules: All supported modules
# - playerbots-only: Just playerbots
#
# Note: Only 'realmmaster' images are currently published to Docker Hub.
# Other profiles will be available when built via GitHub Actions workflow.
MODULE_PROFILE=realmmaster
# =====================
@@ -55,6 +60,11 @@ AC_WORLDSERVER_IMAGE_MODULES=${DOCKERHUB_USERNAME}/${COMPOSE_PROJECT_NAME}:world
AC_AUTHSERVER_IMAGE=acore/ac-wotlk-authserver:master
AC_WORLDSERVER_IMAGE=acore/ac-wotlk-worldserver:master
# Playerbots images (referenced by docker-compose, even if using modules profile)
AC_AUTHSERVER_IMAGE_PLAYERBOTS=${COMPOSE_PROJECT_NAME}:authserver-playerbots
AC_WORLDSERVER_IMAGE_PLAYERBOTS=${COMPOSE_PROJECT_NAME}:worldserver-playerbots
AC_CLIENT_DATA_IMAGE_PLAYERBOTS=${COMPOSE_PROJECT_NAME}:client-data-playerbots
# Database and client data images
AC_DB_IMPORT_IMAGE=acore/ac-wotlk-db-import:master
AC_CLIENT_DATA_IMAGE=acore/ac-wotlk-client-data:master
@@ -95,21 +105,21 @@ NETWORK_GATEWAY=172.20.0.1
# =====================
# Change this to your server's public IP or domain name
SERVER_ADDRESS=127.0.0.1
-REALM_PORT=8215
+REALM_PORT=8085
# =====================
# Ports
# =====================
# Authentication server
-AUTH_EXTERNAL_PORT=3784
+AUTH_EXTERNAL_PORT=3724
AUTH_PORT=3724
# World server
-WORLD_EXTERNAL_PORT=8215
+WORLD_EXTERNAL_PORT=8085
WORLD_PORT=8085
# SOAP/Remote access
-SOAP_EXTERNAL_PORT=7778
+SOAP_EXTERNAL_PORT=7878
SOAP_PORT=7878
# MySQL database (for external access)
@@ -183,6 +193,11 @@ BACKUP_HEALTHCHECK_GRACE_SECONDS=4500
# These settings tell the system that modules are already built into the images
STACK_IMAGE_MODE=modules
STACK_SOURCE_VARIANT=playerbots
# Key modules enabled (needed for profile detection)
# The RealmMaster profile includes playerbots, so we need this set for deploy.sh to use the correct profile
MODULE_PLAYERBOTS=1
# Note: MODULES_ENABLED_LIST varies by profile - the list below is for the 'realmmaster' profile
# For other profiles, see the corresponding JSON in config/module-profiles/
MODULES_ENABLED_LIST=MODULE_PLAYERBOTS,MODULE_TRANSMOG,MODULE_SOLO_LFG,MODULE_ELUNA,MODULE_AIO,MODULE_NPC_BUFFER,MODULE_NPC_BEASTMASTER,MODULE_SOLOCRAFT,MODULE_1V1_ARENA,MODULE_ACCOUNT_ACHIEVEMENTS,MODULE_ACTIVE_CHAT,MODULE_ARAC,MODULE_ASSISTANT,MODULE_AUTO_REVIVE,MODULE_BLACK_MARKET_AUCTION_HOUSE,MODULE_BOSS_ANNOUNCER,MODULE_BREAKING_NEWS,MODULE_ELUNA_SCRIPTS,MODULE_EVENT_SCRIPTS,MODULE_FIREWORKS,MODULE_GAIN_HONOR_GUARD,MODULE_GLOBAL_CHAT,MODULE_GUILDHOUSE,MODULE_INSTANCE_RESET,MODULE_ITEM_LEVEL_UP,MODULE_LEARN_SPELLS,MODULE_MORPHSUMMON,MODULE_NPC_ENCHANTER,MODULE_NPC_FREE_PROFESSIONS,MODULE_RANDOM_ENCHANTS,MODULE_REAGENT_BANK,MODULE_TIME_IS_TIME
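Editor's note: the comment above says MODULE_PLAYERBOTS=1 is what lets deploy.sh pick the correct profile. A minimal, hypothetical sketch of that kind of flag-driven profile selection (this is illustrative only, not the repo's actual deploy.sh logic; profile names are assumptions):

```bash
#!/usr/bin/env bash
# Hypothetical sketch: choose a compose profile from .env flags.
set -euo pipefail
source ./.env   # assumes a simple KEY=VALUE .env in the current directory

if [ "${MODULE_PLAYERBOTS:-0}" = "1" ]; then
  COMPOSE_PROFILE="playerbots"   # playerbots-enabled images
else
  COMPOSE_PROFILE="modules"      # plain modules images
fi

echo "Using compose profile: ${COMPOSE_PROFILE}"
docker compose --profile "${COMPOSE_PROFILE}" up -d
```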

View File

@@ -118,11 +118,11 @@ ALPINE_IMAGE=alpine:latest
# =====================
# Ports
# =====================
-AUTH_EXTERNAL_PORT=3784
+AUTH_EXTERNAL_PORT=3724
AUTH_PORT=3724
-WORLD_EXTERNAL_PORT=8215
+WORLD_EXTERNAL_PORT=8085
WORLD_PORT=8085
-SOAP_EXTERNAL_PORT=7778
+SOAP_EXTERNAL_PORT=7878
SOAP_PORT=7878
# =====================
@@ -136,7 +136,7 @@ NETWORK_GATEWAY=172.20.0.1
# Server address / realm
# =====================
SERVER_ADDRESS=127.0.0.1
-REALM_PORT=8215
+REALM_PORT=8085
# =====================
# MySQL / Database Layer
@@ -559,3 +559,8 @@ MODULE_MOD_PYTHON_ENGINE=0
MODULE_WRATH_OF_THE_VANILLA_V2=0
MODULE_DUELS=0
MODULE_WOW_CORE=0
MODULE_CLANCENTAUR=0
MODULE_DELVES=0
MODULE_MOD_DISABLE_ACHIEVEMENTS=0
MODULE_LUA_BATTLEPASS=0
MODULE_MOD_GM_DISCORD=0

View File

@@ -118,6 +118,14 @@ jobs:
grep '^STORAGE_PATH_LOCAL=' .env || echo " STORAGE_PATH_LOCAL not found"
grep '^MODULES_REBUILD_SOURCE_PATH=' .env || echo " MODULES_REBUILD_SOURCE_PATH not found"
grep '^STACK_SOURCE_VARIANT=' .env || echo " STACK_SOURCE_VARIANT not found"
echo ""
# Verify all Docker images are configured
echo "🐳 Docker Images (that we build and push):"
grep -E '^AC_AUTHSERVER_IMAGE_PLAYERBOTS=' .env || echo " AC_AUTHSERVER_IMAGE_PLAYERBOTS not found"
grep -E '^AC_WORLDSERVER_IMAGE_PLAYERBOTS=' .env || echo " AC_WORLDSERVER_IMAGE_PLAYERBOTS not found"
grep -E '^AC_AUTHSERVER_IMAGE_MODULES=' .env || echo " AC_AUTHSERVER_IMAGE_MODULES not found"
grep -E '^AC_WORLDSERVER_IMAGE_MODULES=' .env || echo " AC_WORLDSERVER_IMAGE_MODULES not found"
- name: Cache Go build cache
uses: actions/cache@v4
@@ -222,7 +230,7 @@ jobs:
echo "<summary>View enabled modules</summary>" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
-grep '^MODULE_.*=1' .env | sed 's/=1//' || true >> $GITHUB_STEP_SUMMARY
+grep '^MODULE_.*=1' .env | sed 's/=1//' >> $GITHUB_STEP_SUMMARY || true
echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
echo "</details>" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
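Editor's note on the one-line fix above: with `|| true` placed before the redirect, the redirect binds to the `true` command rather than to the pipeline, so nothing reaches the summary file. A generic shell sketch of the difference (not tied to this workflow):

```bash
# Broken ordering: '>> out.txt' is part of the 'true' command, so the pipeline's
# output goes to stdout and out.txt receives nothing.
grep '^MODULE_.*=1' .env | sed 's/=1//' || true >> out.txt

# Fixed ordering: the pipeline's output is appended to the file; '|| true' only
# suppresses a non-zero exit status from the pipeline.
grep '^MODULE_.*=1' .env | sed 's/=1//' >> out.txt || true
```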

.github/workflows/create-release.yml (new file, 246 lines)
View File

@@ -0,0 +1,246 @@
name: Create Release
on:
workflow_dispatch:
inputs:
version:
description: 'Release version (e.g., v1.0.0)'
required: true
type: string
profile:
description: 'Module profile for this release'
required: false
type: string
default: 'RealmMaster'
prerelease:
description: 'Mark as pre-release'
required: false
type: boolean
default: false
jobs:
create-release:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Prepare release variables
run: |
VERSION="${{ github.event.inputs.version }}"
PROFILE="${{ github.event.inputs.profile }}"
PROFILE_TAG=$(echo "${PROFILE}" | tr '[:upper:]' '[:lower:]' | tr '_' '-')
echo "VERSION=${VERSION}" >> $GITHUB_ENV
echo "PROFILE=${PROFILE}" >> $GITHUB_ENV
echo "PROFILE_TAG=${PROFILE_TAG}" >> $GITHUB_ENV
# Get build date from Docker Hub image (or use current date)
BUILD_DATE=$(date +%Y%m%d)
echo "BUILD_DATE=${BUILD_DATE}" >> $GITHUB_ENV
# Get AzerothCore commit from local-storage if available
if [ -d "local-storage/source/azerothcore-playerbots" ]; then
ACORE_COMMIT=$(cd local-storage/source/azerothcore-playerbots && git rev-parse --short HEAD)
else
ACORE_COMMIT="unknown"
fi
echo "ACORE_COMMIT=${ACORE_COMMIT}" >> $GITHUB_ENV
- name: Read module list from profile
run: |
PROFILE_FILE="config/module-profiles/${PROFILE}.json"
if [ ! -f "$PROFILE_FILE" ]; then
echo "ERROR: Profile file not found: $PROFILE_FILE"
exit 1
fi
# Extract module count
MODULE_COUNT=$(python3 -c "import json; data=json.load(open('$PROFILE_FILE')); print(len(data.get('modules', [])))")
echo "MODULE_COUNT=${MODULE_COUNT}" >> $GITHUB_ENV
# Extract modules for release notes
python3 -c "import json; data=json.load(open('$PROFILE_FILE')); print('\n'.join(['- ' + m for m in data.get('modules', [])]))" > modules.txt
- name: Create deployment package
run: |
PACKAGE_NAME="azerothcore-realmmaster-${VERSION}-${PROFILE_TAG}"
mkdir -p "${PACKAGE_NAME}"
# Copy essential deployment files
cp .env.prebuilt "${PACKAGE_NAME}/.env.prebuilt"
cp docker-compose.yml "${PACKAGE_NAME}/docker-compose.yml"
cp deploy.sh "${PACKAGE_NAME}/deploy.sh"
cp status.sh "${PACKAGE_NAME}/status.sh"
cp cleanup.sh "${PACKAGE_NAME}/cleanup.sh"
cp README.md "${PACKAGE_NAME}/README.md"
# Copy scripts directory
cp -r scripts "${PACKAGE_NAME}/scripts"
# Copy config directory
cp -r config "${PACKAGE_NAME}/config"
# Copy docs directory
cp -r docs "${PACKAGE_NAME}/docs"
# Create a quick start guide specific to this release
{
echo "# Quick Start - AzerothCore RealmMaster ${VERSION}"
echo ""
echo "## Module Profile: ${PROFILE}"
echo "${MODULE_COUNT} modules included"
echo ""
echo "## Docker Images"
echo "This release uses the following pre-built images:"
echo "- \`\${DOCKERHUB_USERNAME}/azerothcore-realmmaster:authserver-${PROFILE_TAG}-${BUILD_DATE}\`"
echo "- \`\${DOCKERHUB_USERNAME}/azerothcore-realmmaster:worldserver-${PROFILE_TAG}-${BUILD_DATE}\`"
echo ""
echo "Or use the latest tags:"
echo "- \`\${DOCKERHUB_USERNAME}/azerothcore-realmmaster:authserver-${PROFILE_TAG}-latest\`"
echo "- \`\${DOCKERHUB_USERNAME}/azerothcore-realmmaster:worldserver-${PROFILE_TAG}-latest\`"
echo ""
echo "## Installation"
echo ""
echo "1. **Edit .env.prebuilt**:"
echo " \`\`\`bash"
echo " nano .env.prebuilt"
echo " # Set: DOCKERHUB_USERNAME=your-dockerhub-username"
echo " \`\`\`"
echo ""
echo "2. **Rename to .env**:"
echo " \`\`\`bash"
echo " mv .env.prebuilt .env"
echo " \`\`\`"
echo ""
echo "3. **Deploy**:"
echo " \`\`\`bash"
echo " chmod +x deploy.sh status.sh cleanup.sh"
echo " ./deploy.sh"
echo " \`\`\`"
echo ""
echo "4. **Check status**:"
echo " \`\`\`bash"
echo " ./status.sh"
echo " \`\`\`"
echo ""
echo "## Documentation"
echo "- [Pre-Built Images Guide](docs/PREBUILT_IMAGES.md)"
echo "- [Getting Started](docs/GETTING_STARTED.md)"
echo "- [Troubleshooting](docs/TROUBLESHOOTING.md)"
echo ""
echo "## Support"
echo "- GitHub Issues: https://github.com/uprightbass360/AzerothCore-RealmMaster/issues"
echo "- AzerothCore Discord: https://discord.gg/gkt4y2x"
} > "${PACKAGE_NAME}/QUICKSTART.md"
# Make scripts executable
chmod +x "${PACKAGE_NAME}/deploy.sh"
chmod +x "${PACKAGE_NAME}/status.sh"
chmod +x "${PACKAGE_NAME}/cleanup.sh"
# Create zip archive
zip -r "${PACKAGE_NAME}.zip" "${PACKAGE_NAME}"
echo "PACKAGE_NAME=${PACKAGE_NAME}" >> $GITHUB_ENV
- name: Generate release notes
run: |
{
echo "# AzerothCore RealmMaster ${VERSION} - ${PROFILE} Profile"
echo ""
echo "## 🎯 Module Profile: ${PROFILE}"
echo "${MODULE_COUNT} modules included"
echo ""
echo "## 📦 Docker Images"
echo ""
echo "Pull these pre-built images from Docker Hub:"
echo ""
echo "**Date-specific (recommended for production)**:"
echo "\`\`\`bash"
echo "docker pull \${DOCKERHUB_USERNAME}/azerothcore-realmmaster:authserver-${PROFILE_TAG}-${BUILD_DATE}"
echo "docker pull \${DOCKERHUB_USERNAME}/azerothcore-realmmaster:worldserver-${PROFILE_TAG}-${BUILD_DATE}"
echo "\`\`\`"
echo ""
echo "**Latest (auto-updated nightly)**:"
echo "\`\`\`bash"
echo "docker pull \${DOCKERHUB_USERNAME}/azerothcore-realmmaster:authserver-${PROFILE_TAG}-latest"
echo "docker pull \${DOCKERHUB_USERNAME}/azerothcore-realmmaster:worldserver-${PROFILE_TAG}-latest"
echo "\`\`\`"
echo ""
echo "## 🚀 Quick Start"
echo ""
echo "\`\`\`bash"
echo "# Download and extract"
echo "wget https://github.com/uprightbass360/AzerothCore-RealmMaster/releases/download/${VERSION}/${PACKAGE_NAME}.zip"
echo "unzip ${PACKAGE_NAME}.zip"
echo "cd ${PACKAGE_NAME}"
echo ""
echo "# Configure Docker Hub username"
echo "nano .env.prebuilt"
echo "# Set: DOCKERHUB_USERNAME=your-dockerhub-username"
echo ""
echo "# Deploy"
echo "mv .env.prebuilt .env"
echo "./deploy.sh"
echo "\`\`\`"
echo ""
echo "Full documentation in \`docs/PREBUILT_IMAGES.md\`"
echo ""
echo "## 📋 Included Modules"
echo ""
cat modules.txt
echo ""
echo "## 📊 Build Information"
echo ""
echo "- **Built**: ${BUILD_DATE}"
echo "- **AzerothCore Commit**: ${ACORE_COMMIT}"
echo "- **Source Variant**: playerbots (for MODULE_PLAYERBOTS support)"
echo "- **Profile**: ${PROFILE}"
echo "- **Module Count**: ${MODULE_COUNT}"
echo ""
echo "## 📖 Documentation"
echo ""
echo "Full documentation available in the \`docs/\` directory of the release package:"
echo "- [Pre-Built Images Guide](https://github.com/uprightbass360/AzerothCore-RealmMaster/blob/${VERSION}/docs/PREBUILT_IMAGES.md)"
echo "- [Getting Started Guide](https://github.com/uprightbass360/AzerothCore-RealmMaster/blob/${VERSION}/docs/GETTING_STARTED.md)"
echo "- [Module Catalog](https://github.com/uprightbass360/AzerothCore-RealmMaster/blob/${VERSION}/docs/MODULES.md)"
echo "- [Troubleshooting](https://github.com/uprightbass360/AzerothCore-RealmMaster/blob/${VERSION}/docs/TROUBLESHOOTING.md)"
echo ""
echo "## 🐛 Known Issues"
echo ""
echo "None at this time. Report issues at: https://github.com/uprightbass360/AzerothCore-RealmMaster/issues"
echo ""
echo "## 💬 Support"
echo ""
echo "- **GitHub Issues**: https://github.com/uprightbass360/AzerothCore-RealmMaster/issues"
echo "- **AzerothCore Discord**: https://discord.gg/gkt4y2x"
echo "- **Documentation**: https://github.com/uprightbass360/AzerothCore-RealmMaster/tree/${VERSION}/docs"
} > release_notes.md
- name: Create GitHub Release
uses: softprops/action-gh-release@v1
with:
tag_name: ${{ env.VERSION }}
name: "RealmMaster ${{ env.VERSION }} - ${{ env.PROFILE }} Profile"
body_path: release_notes.md
files: |
${{ env.PACKAGE_NAME }}.zip
prerelease: ${{ github.event.inputs.prerelease }}
draft: false
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Release summary
run: |
echo "## Release Created Successfully! 🎉" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Version**: ${{ env.VERSION }}" >> $GITHUB_STEP_SUMMARY
echo "**Profile**: ${{ env.PROFILE }}" >> $GITHUB_STEP_SUMMARY
echo "**Modules**: ${{ env.MODULE_COUNT }}" >> $GITHUB_STEP_SUMMARY
echo "**Package**: ${{ env.PACKAGE_NAME }}.zip" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "📦 Release available at:" >> $GITHUB_STEP_SUMMARY
echo "https://github.com/${{ github.repository }}/releases/tag/${{ env.VERSION }}" >> $GITHUB_STEP_SUMMARY
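Editor's note: commit 61436263ad (listed above) swapped heredocs for grouped `echo` statements throughout this workflow. A minimal side-by-side sketch of the two patterns, assuming VERSION and PROFILE are already set in the shell:

```bash
# Heredoc form: fine in a standalone script, but per the commit message the
# markdown '#' headings inside heredocs tripped YAML validation when the script
# body was embedded in a workflow 'run:' block.
cat > release_notes.md <<EOF
# AzerothCore RealmMaster ${VERSION}
## Module Profile: ${PROFILE}
EOF

# Grouped-echo form used by the workflow: every line is a quoted string, so the
# YAML parser never sees bare markdown, and ${VERSION}/${PROFILE} still expand.
{
  echo "# AzerothCore RealmMaster ${VERSION}"
  echo "## Module Profile: ${PROFILE}"
} > release_notes.md
```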

View File

@@ -66,6 +66,8 @@ cp .env.prebuilt .env
Pre-built images include the **RealmMaster profile** (32 modules) and are automatically built nightly. See **[docs/PREBUILT_IMAGES.md](docs/PREBUILT_IMAGES.md)** for details.

**Note:** Remote deployments require one additional step after migration - see [Remote Deployment Guide](docs/GETTING_STARTED.md#remote-deployment).

See [Getting Started](#getting-started) for detailed walkthrough.

## What You Get

View File

@@ -5517,6 +5517,76 @@
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
},
{
"key": "MODULE_DELVES",
"name": "Delves",
"repo": "https://github.com/araxiaonline/Delves.git",
"description": "List of the Custom Made Single Player Delves for Araxia Online",
"type": "lua",
"category": "scripting",
"notes": "Discovered via GitHub topic 'azerothcore-module'",
"status": "active",
"order": 5000,
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
},
{
"key": "MODULE_CLANCENTAUR",
"name": "ClanCentaur",
"repo": "https://github.com/araxiaonline/ClanCentaur.git",
"description": "Custom SQL modifications and patch notes for new faction rewards, reputation items, and unique vendors on the Araxia WoW 3.3.5a server.",
"type": "sql",
"category": "database",
"notes": "Discovered via GitHub topic 'azerothcore-module'",
"status": "active",
"order": 5000,
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
},
{
"key": "MODULE_MOD_DISABLE_ACHIEVEMENTS",
"name": "mod-disable-achievements",
"repo": "https://github.com/olive-spore-734/mod-disable-achievements.git",
"description": "SQL with a long list of WotLK Achievements and their IDs, which should make it much easier to find and disable some. Made for AzerothCore.",
"type": "sql",
"category": "database",
"notes": "Discovered via GitHub topic 'azerothcore-module'",
"status": "active",
"order": 5000,
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
},
{
"key": "MODULE_LUA_BATTLEPASS",
"name": "lua-battlepass",
"repo": "https://github.com/Shonik/lua-battlepass.git",
"description": "Battle Pass System for AzerothCore",
"type": "lua",
"category": "scripting",
"notes": "Discovered via GitHub topic 'azerothcore-lua'",
"status": "active",
"order": 5000,
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
},
{
"key": "MODULE_MOD_GM_DISCORD",
"name": "mod-gm-discord",
"repo": "https://github.com/Diabloxx/mod-gm-discord.git",
"description": "GM to Discord Tools",
"type": "cpp",
"category": "uncategorized",
"notes": "Discovered via GitHub topic 'azerothcore-module'",
"status": "active",
"order": 5000,
"requires": [],
"post_install_hooks": [],
"config_cleanup": []
}
]
}
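Editor's note: every manifest entry added above shares the same shape (key, name, repo, type, category, status, order, requires, hooks). A quick, hedged way to query such a manifest, in the same python3 one-liner style the workflows already use; the manifest's path and exact top-level key are not visible in this diff, so both are placeholders:

```bash
# List type, key, and repo for every entry in a module manifest JSON fed on stdin.
# Grabs whichever top-level value is a list, since the top-level key isn't shown here.
python3 -c "
import json, sys
data = json.load(sys.stdin)
entries = next(v for v in data.values() if isinstance(v, list))
for m in entries:
    print(m.get('type', '?').ljust(6), m.get('key', '?'), m.get('repo', ''))
" < path/to/your-module-manifest.json
```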

View File

@@ -21,10 +21,10 @@
"MODULE_ARAC",
"MODULE_ASSISTANT",
"MODULE_REAGENT_BANK",
"MODULE_BLACK_MARKET_AUCTION_HOUSE",
"MODULE_ELUNA",
"MODULE_AIO",
"MODULE_ELUNA_SCRIPTS",
-"MODULE_LUA_AH_BOT",
"MODULE_EVENT_SCRIPTS",
"MODULE_ACTIVE_CHAT",
"MODULE_GUILDHOUSE",

View File

@@ -0,0 +1,13 @@
# AzerothCore RealmMaster - Docker NFS Dependencies
# Ensures Docker waits for NFS mounts before starting to prevent race conditions
# where containers create local directories before NFS mounts are ready
[Unit]
# Wait for NFS mounts to be active before starting Docker
After=nfs-azerothcore.mount nfs-containers.mount
# Require the primary backup NFS mount (critical for data integrity)
Requires=nfs-azerothcore.mount
# Prefer the containers NFS mount but don't fail if unavailable
Wants=nfs-containers.mount
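Editor's note: the drop-in above is installed by the helper script further down in this diff. A condensed manual sketch of the same steps, using the paths that script defines (run with root privileges):

```bash
# Copy the drop-in into Docker's systemd override directory and reload systemd.
sudo mkdir -p /etc/systemd/system/docker.service.d
sudo cp config/systemd/docker.service.d/nfs-dependencies.conf \
        /etc/systemd/system/docker.service.d/nfs-dependencies.conf
sudo systemctl daemon-reload

# Confirm Docker now lists the NFS mounts among its dependencies.
systemctl show -p After,Requires,Wants docker.service | grep -E '^(After|Requires|Wants)='
```

The change only takes effect on the next Docker restart or reboot, as the install script also notes.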

View File

@@ -32,6 +32,7 @@ REMOTE_IDENTITY=""
REMOTE_PROJECT_DIR=""
REMOTE_SKIP_STORAGE=0
REMOTE_COPY_SOURCE=0
REMOTE_SETUP_SOURCE=""
REMOTE_ARGS_PROVIDED=0
REMOTE_AUTO_DEPLOY=0
REMOTE_CLEAN_CONTAINERS=0
@@ -261,6 +262,8 @@ Options:
--remote-project-dir DIR Remote project directory (default: ~/<project-name>)
--remote-skip-storage Skip syncing the storage directory during migration
--remote-copy-source Copy the local project directory to remote instead of relying on git
--remote-setup-source Automatically run setup-source.sh on remote (clone AzerothCore SQL)
--remote-skip-source-setup Skip source repository setup (you'll run manually later)
--remote-auto-deploy Run './deploy.sh --yes --no-watch' on the remote host after migration
--remote-clean-containers Stop/remove remote containers & project images during migration
--remote-storage-path PATH Override STORAGE_PATH/STORAGE_PATH_LOCAL in the remote .env
@@ -294,6 +297,8 @@ while [[ $# -gt 0 ]]; do
--remote-project-dir) REMOTE_PROJECT_DIR="$2"; REMOTE_MODE=1; REMOTE_ARGS_PROVIDED=1; shift 2;;
--remote-skip-storage) REMOTE_SKIP_STORAGE=1; REMOTE_MODE=1; REMOTE_ARGS_PROVIDED=1; shift;;
--remote-copy-source) REMOTE_COPY_SOURCE=1; REMOTE_MODE=1; REMOTE_ARGS_PROVIDED=1; shift;;
--remote-setup-source) REMOTE_SETUP_SOURCE=1; REMOTE_MODE=1; REMOTE_ARGS_PROVIDED=1; shift;;
--remote-skip-source-setup) REMOTE_SETUP_SOURCE=0; REMOTE_MODE=1; REMOTE_ARGS_PROVIDED=1; shift;;
--remote-auto-deploy) REMOTE_AUTO_DEPLOY=1; REMOTE_MODE=1; REMOTE_ARGS_PROVIDED=1; shift;;
--remote-clean-containers) REMOTE_CLEAN_CONTAINERS=1; REMOTE_MODE=1; REMOTE_ARGS_PROVIDED=1; shift;;
--remote-storage-path) REMOTE_STORAGE_OVERRIDE="$2"; REMOTE_MODE=1; REMOTE_ARGS_PROVIDED=1; shift 2;;
@@ -754,6 +759,10 @@ run_remote_migration(){
args+=(--env-file "$REMOTE_ENV_FILE")
fi
if [ -n "$REMOTE_SETUP_SOURCE" ]; then
args+=(--setup-source "$REMOTE_SETUP_SOURCE")
fi
(cd "$ROOT_DIR" && ./scripts/bash/migrate-stack.sh "${args[@]}")
}
@@ -891,6 +900,24 @@ apply_server_config(){
fi
}
update_realmlist(){
info "Updating realmlist in database with current SERVER_ADDRESS and REALM_PORT..."
local update_script="$ROOT_DIR/scripts/bash/update-realmlist.sh"
if [ ! -x "$update_script" ]; then
warn "Realmlist update script not found or not executable: $update_script"
return 0
fi
# Run the update script
if bash "$update_script"; then
ok "Realmlist updated successfully"
else
warn "Could not update realmlist - this is normal if database is still initializing"
info "The realmlist will be updated on next deployment or you can run: ./scripts/bash/update-realmlist.sh"
fi
}
main(){
if [ "$ASSUME_YES" -ne 1 ]; then
if [ -t 0 ]; then
@@ -947,29 +974,32 @@ main(){
fi
fi
-show_step 1 4 "Checking build requirements"
+show_step 1 7 "Checking build requirements"
if ! prompt_build_if_needed; then
err "Build required but not completed. Deployment cancelled."
exit 1
fi
if [ "$KEEP_RUNNING" -ne 1 ]; then
-show_step 2 4 "Stopping runtime stack"
+show_step 2 7 "Stopping runtime stack"
stop_runtime_stack
fi
-show_step 3 5 "Importing user database files"
+show_step 3 7 "Importing user database files"
info "Checking for database files in ./import/db/ and ./database-import/"
bash "$ROOT_DIR/scripts/bash/import-database-files.sh"
-show_step 4 6 "Bringing your realm online"
+show_step 4 7 "Bringing your realm online"
info "Pulling images and waiting for containers to become healthy; this may take a few minutes on first deploy."
stage_runtime
-show_step 5 6 "Applying server configuration"
+show_step 5 7 "Applying server configuration"
apply_server_config
-show_step 6 6 "Finalizing deployment"
+show_step 6 7 "Updating realmlist"
update_realmlist
show_step 7 7 "Finalizing deployment"
mark_deployment_complete
show_realm_ready
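Editor's note: per the messages added above, the realmlist refresh can also be run by hand if the realm address changes after deployment:

```bash
# Edit SERVER_ADDRESS / REALM_PORT in .env first, then push the change into the auth database.
./scripts/bash/update-realmlist.sh

# Or simply re-run the deployment, which now performs the realmlist update as step 6 of 7.
./deploy.sh
```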

View File

@@ -58,16 +58,21 @@ The workflow **automatically fetches all module repositories** during the build.
The workflow publishes images with **profile-specific tags** so you know exactly which modules are included:

**Profile-Tagged Images** (recommended):
-- `<dockerhub-username>/azerothcore-realmmaster:authserver-realmmaster-latest`
+- `<dockerhub-username>/azerothcore-realmmaster:authserver-realmmaster-latest` ✅ Built nightly
-- `<dockerhub-username>/azerothcore-realmmaster:authserver-realmmaster-YYYYMMDD`
+- `<dockerhub-username>/azerothcore-realmmaster:authserver-realmmaster-YYYYMMDD` ✅ Built nightly
-- `<dockerhub-username>/azerothcore-realmmaster:worldserver-realmmaster-latest`
+- `<dockerhub-username>/azerothcore-realmmaster:worldserver-realmmaster-latest` ✅ Built nightly
-- `<dockerhub-username>/azerothcore-realmmaster:worldserver-realmmaster-YYYYMMDD`
+- `<dockerhub-username>/azerothcore-realmmaster:worldserver-realmmaster-YYYYMMDD` ✅ Built nightly

**Generic Tags** (backward compatibility, defaults to RealmMaster profile):
-- `<dockerhub-username>/azerothcore-realmmaster:authserver-latest`
+- `<dockerhub-username>/azerothcore-realmmaster:authserver-latest` ✅ Built nightly
-- `<dockerhub-username>/azerothcore-realmmaster:worldserver-latest`
+- `<dockerhub-username>/azerothcore-realmmaster:worldserver-latest` ✅ Built nightly

-The profile name in the tag tells you which module set is included (e.g., `realmmaster`, `suggested-modules`, `all-modules`).
+**Other Profile Tags** (built on-demand via manual workflow trigger):
- `authserver-suggested-modules-latest` - Available when built
- `authserver-all-modules-latest` - Available when built
- `authserver-playerbots-only-latest` - Available when built

**Note**: Only the RealmMaster profile is built automatically on schedule. Other profiles can be built by manually triggering the workflow with different profile names.
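Editor's note: one way to trigger such an on-demand build is the GitHub CLI. The workflow file name and input name below are assumptions (they are not shown in this diff), so check `.github/workflows/` for the actual values:

```bash
# Hypothetical example: kick off a manual build of the 'all-modules' profile.
gh workflow run build-images.yml -f profile=all-modules --ref main
```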
## Required GitHub Secrets

View File

@@ -199,7 +199,32 @@ The remote deployment process transfers:
- ✅ Docker images (exported to `local-storage/images/`)
- ✅ Project files (scripts, configs, docker-compose.yml, .env)
- ✅ Storage directory (unless `--remote-skip-storage` is used)
-- Build artifacts (source code, compilation files stay local)
+- AzerothCore source repository (must be set up separately - see below)
**IMPORTANT: AzerothCore Source Setup**
The AzerothCore source repository (~2GB) is NOT synced during migration to avoid slow transfers. However, it's **required** for database initialization.
**Option 1: Automatic Setup (Recommended)**
Use the `--remote-setup-source` flag to automatically clone the source on the remote host:
```bash
./deploy.sh --remote-host your-server --remote-user youruser --remote-setup-source
```
**Option 2: Manual Setup**
After migration completes, SSH to the remote host and run:
```bash
ssh your-server
cd ~/AzerothCore-RealmMaster # or your custom project directory
./scripts/bash/setup-source.sh
```
**Without this step, database initialization will fail with:**
`❌ FATAL: SQL source directory not found`
### Module Presets

View File

@@ -48,11 +48,8 @@ cp .env.prebuilt .env
Edit `.env` and set your Docker Hub username:

```bash
-# Change this line:
+# Change this line to your Docker Hub username:
DOCKERHUB_USERNAME=your-dockerhub-username
-# To (example):
-DOCKERHUB_USERNAME=uprightbass360
```

### 4. Optional: Customize Settings
@@ -91,10 +88,12 @@ Each module profile gets its own tag:
- **`:authserver-realmmaster-YYYYMMDD`** - Date-tagged RealmMaster builds
- **`:worldserver-realmmaster-YYYYMMDD`** - Date-tagged RealmMaster builds

-Other available profiles (if built):
+Other profiles (available when built via GitHub Actions):
-- **`:authserver-suggested-modules-latest`** - Suggested modules profile
+- **`:authserver-suggested-modules-latest`** - Suggested modules profile (not yet published)
-- **`:authserver-all-modules-latest`** - All modules profile
+- **`:authserver-all-modules-latest`** - All modules profile (not yet published)
-- **`:authserver-playerbots-only-latest`** - Playerbots only
+- **`:authserver-playerbots-only-latest`** - Playerbots only (not yet published)

**Note**: Currently only the RealmMaster profile is built nightly. Other profiles can be built on-demand by manually triggering the CI/CD workflow.

### Generic Tags (Backward Compatibility)

docs/RELEASES.md (new file, 213 lines)
View File

@@ -0,0 +1,213 @@
# Release Strategy
This document explains how AzerothCore RealmMaster releases work and what they contain.
## Release Philosophy
Since **Docker images are stored on Docker Hub**, GitHub releases serve as **deployment packages** rather than source distributions. Each release contains everything users need to deploy pre-built images without building from source.
## What's in a Release?
### Release Assets (ZIP Archive)
Each release includes a downloadable `.zip` file containing:
```
azerothcore-realmmaster-v1.0.0-realmmaster.zip
├── .env.prebuilt # Pre-configured for Docker Hub images
├── docker-compose.yml # Service definitions
├── deploy.sh # Deployment script
├── status.sh # Status monitoring
├── cleanup.sh # Cleanup utilities
├── scripts/ # Required Python/Bash scripts
├── config/ # Module manifest and presets
├── docs/ # Complete documentation
├── QUICKSTART.md # Release-specific quick start
└── README.md # Project overview
```
### Release Notes
Each release includes:
- Module profile and count
- Docker Hub image tags (date-specific and latest)
- Quick start instructions
- Complete module list
- Build information (commit, date, source variant)
- Links to documentation
- Known issues
## Release Types
### 1. Profile-Based Releases
Each module profile gets its own release variant:
- **v1.0.0-realmmaster** - RealmMaster profile (32 modules, recommended)
- **v1.0.0-suggested-modules** - Alternative suggested module set
- **v1.0.0-all-modules** - All supported modules
- **v1.0.0-playerbots-only** - Just playerbots
Users choose the release that matches their desired module set.
### 2. Version Numbering
We use semantic versioning:
- **Major** (v1.0.0 → v2.0.0): Breaking changes, major feature additions
- **Minor** (v1.0.0 → v1.1.0): New modules, feature enhancements
- **Patch** (v1.0.0 → v1.0.1): Bug fixes, documentation updates
## Docker Hub Image Tags
Releases reference specific Docker Hub tags:
### Date-Tagged Images (Recommended for Production)
```
uprightbass360/azerothcore-realmmaster:authserver-realmmaster-20260109
uprightbass360/azerothcore-realmmaster:worldserver-realmmaster-20260109
```
- **Immutable**: Never change
- **Stable**: Guaranteed to match the release
- **Recommended**: For production deployments
### Latest Tags (Auto-Updated)
```
uprightbass360/azerothcore-realmmaster:authserver-realmmaster-latest
uprightbass360/azerothcore-realmmaster:worldserver-realmmaster-latest
```
- **Mutable**: Updated nightly by CI/CD
- **Convenient**: Always get the newest build
- **Use case**: Development, testing, staying current
## Creating a Release
### Automated (Recommended)
Use the GitHub Actions workflow:
1. Go to **Actions** → **Create Release**
2. Click **Run workflow**
3. Fill in:
- **Version**: `v1.0.0`
- **Profile**: `RealmMaster` (or other profile)
- **Pre-release**: Check if beta/RC
4. Click **Run workflow**
The workflow automatically:
- Creates deployment package with all files
- Generates release notes with module list
- Uploads ZIP archive as release asset
- Creates GitHub release with proper tags
### Manual
If you need to create a release manually:
```bash
# 1. Tag the release
git tag -a v1.0.0 -m "Release v1.0.0 - RealmMaster Profile"
git push origin v1.0.0
# 2. Create deployment package
./scripts/create-release-package.sh v1.0.0 RealmMaster
# 3. Create GitHub release
# Go to GitHub → Releases → Draft a new release
# - Tag: v1.0.0
# - Title: RealmMaster v1.0.0 - RealmMaster Profile
# - Upload: azerothcore-realmmaster-v1.0.0-realmmaster.zip
# - Add release notes
```
## Release Checklist
Before creating a release:
- [ ] Verify CI/CD build succeeded
- [ ] Test Docker Hub images work correctly
- [ ] Update CHANGELOG.md
- [ ] Update version in documentation if needed
- [ ] Verify all module SQL migrations are included
- [ ] Test deployment on clean system
- [ ] Update known issues section
## For Users: Using a Release
### Quick Start
```bash
# 1. Download release
wget https://github.com/uprightbass360/AzerothCore-RealmMaster/releases/download/v1.0.0/azerothcore-realmmaster-v1.0.0-realmmaster.zip
# 2. Extract
unzip azerothcore-realmmaster-v1.0.0-realmmaster.zip
cd azerothcore-realmmaster-v1.0.0-realmmaster
# 3. Configure
nano .env.prebuilt
# Set: DOCKERHUB_USERNAME=your-dockerhub-username
# 4. Deploy
mv .env.prebuilt .env
./deploy.sh
```
### Upgrading Between Releases
```bash
# 1. Backup your data
./scripts/bash/backup.sh
# 2. Download new release
wget https://github.com/.../releases/download/v1.1.0/...
# 3. Extract to new directory
unzip azerothcore-realmmaster-v1.1.0-realmmaster.zip
# 4. Copy your .env and data
cp old-version/.env new-version/.env
cp -r old-version/storage new-version/storage
# 5. Deploy new version
cd new-version
./deploy.sh
```
## Release Schedule
- **Nightly Builds**: Images built automatically at 2 AM UTC
- **Releases**: Created as needed when significant changes accumulate
- **LTS Releases**: Planned quarterly for long-term support
## Support
- **Release Issues**: https://github.com/uprightbass360/AzerothCore-RealmMaster/issues
- **Documentation**: Included in each release ZIP
- **Discord**: https://discord.gg/gkt4y2x
## FAQ
### Why are images on Docker Hub and not in releases?
Docker images can be 1-2 GB each. GitHub has a 2 GB file limit, and releases should stay lightweight. Docker Hub is designed for hosting images; GitHub releases are for deployment packages.
### Can I use latest tags in production?
We recommend **date-tagged images** for production (e.g., `authserver-realmmaster-20260109`). Latest tags are updated nightly and may have untested changes.
### How do I know which image version a release uses?
Check the release notes - they include the specific Docker Hub tags (date-stamped) that were tested with that release.
### What if I want to build from source instead?
Clone the repository and use `./setup.sh` + `./build.sh` instead of using pre-built releases. See [GETTING_STARTED.md](GETTING_STARTED.md) for instructions.
### Are releases required?
No! You can:
1. **Use releases**: Download ZIP, deploy pre-built images (easiest)
2. **Use nightly images**: Pull latest tags directly from Docker Hub
3. **Build from source**: Clone repo, build locally (most flexible)
Releases are just convenient snapshots for users who want stability.

View File

@@ -38,6 +38,21 @@ ls storage/config/mod_*.conf*
**Database connection issues**
```bash
# Check if source repository is set up (common issue after remote deployment)
ls -la local-storage/source/azerothcore*/data/sql/base/db_world/
# If empty or missing, set up source:
./scripts/bash/setup-source.sh
# Then restart database import:
docker compose run --rm ac-db-import
# Error: "SQL source directory not found"
# This means the AzerothCore source repository hasn't been cloned.
# Solution: Run ./scripts/bash/setup-source.sh
# See docs/GETTING_STARTED.md for details
# Legacy database issues:
# Verify MySQL is running and responsive
docker exec ac-mysql mysql -u root -p -e "SELECT 1;"

View File

@@ -153,10 +153,35 @@ if [ -f "$RESTORE_SUCCESS_MARKER" ]; then
if verify_databases_populated; then
echo "✅ Backup restoration completed successfully"
cat "$RESTORE_SUCCESS_MARKER" || true
-echo "🚫 Skipping database import - data already restored from backup"
# Check if there are pending module SQL updates to apply
echo "🔍 Checking for pending module SQL updates..."
local has_pending_updates=0
# Check if module SQL staging directory has files
if [ -d "/azerothcore/data/sql/updates/db_world" ] && [ -n "$(find /azerothcore/data/sql/updates/db_world -name 'MODULE_*.sql' -type f 2>/dev/null)" ]; then
echo " ⚠️ Found staged module SQL updates that may need application"
has_pending_updates=1
fi
if [ "$has_pending_updates" -eq 0 ]; then
echo "🚫 Skipping database import - data already restored and no pending updates"
exit 0
fi
echo "📦 Running dbimport to apply pending module SQL updates..."
cd /azerothcore/env/dist/bin
seed_dbimport_conf
if ./dbimport; then
echo "✅ Module SQL updates applied successfully!"
exit 0
else
echo "⚠️ dbimport reported issues - check logs for details"
exit 1
fi
fi
echo "⚠️ Restoration marker found, but databases are empty - forcing re-import"
rm -f "$RESTORE_SUCCESS_MARKER" 2>/dev/null || true
rm -f "$RESTORE_SUCCESS_MARKER_TMP" 2>/dev/null || true
@@ -470,6 +495,65 @@ echo "🚀 Running database import..."
cd /azerothcore/env/dist/bin
seed_dbimport_conf
validate_sql_source(){
local sql_base_dir="/azerothcore/data/sql/base"
local required_dirs=("db_auth" "db_world" "db_characters")
local missing_dirs=()
echo "🔍 Validating SQL source availability..."
if [ ! -d "$sql_base_dir" ]; then
cat <<EOF
❌ FATAL: SQL source directory not found at $sql_base_dir
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
The AzerothCore source repository is not mounted or doesn't exist.
This directory should contain SQL schemas for database initialization.
📋 REMEDIATION STEPS:
1. SSH to this host:
ssh $(whoami)@$(hostname)
2. Navigate to project directory and run source setup:
cd $PROJECT_ROOT && ./scripts/bash/setup-source.sh
3. Restart database import:
docker compose run --rm ac-db-import
📦 ALTERNATIVE (Prebuilt Images):
If using Docker images with bundled SQL schemas:
- Set AC_SQL_SOURCE_PATH in .env to point to bundled location
- Example: AC_SQL_SOURCE_PATH=/bundled/sql
📚 Documentation: docs/GETTING_STARTED.md#database-setup
EOF
exit 1
fi
for dir in "${required_dirs[@]}"; do
local full_path="$sql_base_dir/$dir"
if [ ! -d "$full_path" ] || [ -z "$(ls -A "$full_path" 2>/dev/null)" ]; then
missing_dirs+=("$dir")
fi
done
if [ ${#missing_dirs[@]} -gt 0 ]; then
echo ""
echo "❌ FATAL: SQL source directories are empty or missing:"
printf ' - %s\n' "${missing_dirs[@]}"
echo ""
echo "The AzerothCore source directory exists but hasn't been populated with SQL files."
echo "Run './scripts/bash/setup-source.sh' on the host to clone and populate the repository."
echo ""
exit 1
fi
echo "✅ SQL source validation passed - all required schemas present"
}
maybe_run_base_import(){
local mysql_host="${CONTAINER_MYSQL:-ac-mysql}"
local mysql_port="${MYSQL_PORT:-3306}"
@@ -506,6 +590,8 @@ maybe_run_base_import(){
fi
}
# Validate SQL source is available before attempting import
validate_sql_source
maybe_run_base_import
if ./dbimport; then
echo "✅ Database import completed successfully!"
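Editor's note: the same check that `validate_sql_source` performs inside the container can be done from the host before kicking off the import, using the paths already referenced in the troubleshooting section above:

```bash
# Verify the cloned AzerothCore source actually contains the base SQL schemas.
for d in db_auth db_world db_characters; do
  ls local-storage/source/azerothcore*/data/sql/base/"$d" >/dev/null 2>&1 \
    && echo "OK: $d" \
    || echo "MISSING: $d - run ./scripts/bash/setup-source.sh"
done

# Then run the import container on its own.
docker compose run --rm ac-db-import
```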

View File

@@ -0,0 +1,96 @@
#!/bin/bash
# AzerothCore RealmMaster - Install Docker NFS Dependencies Fix
# This script installs a systemd drop-in configuration to ensure Docker
# waits for NFS mounts before starting, preventing backup folder deletion issues
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
DROP_IN_SOURCE="$PROJECT_ROOT/config/systemd/docker.service.d/nfs-dependencies.conf"
DROP_IN_TARGET="/etc/systemd/system/docker.service.d/nfs-dependencies.conf"
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
log_info() { echo -e "${BLUE} $*${NC}"; }
log_ok() { echo -e "${GREEN}$*${NC}"; }
log_warn() { echo -e "${YELLOW}⚠️ $*${NC}"; }
log_err() { echo -e "${RED}$*${NC}"; }
# Check if running as root
if [ "$EUID" -ne 0 ]; then
log_err "This script must be run as root (use sudo)"
exit 1
fi
# Check if source file exists
if [ ! -f "$DROP_IN_SOURCE" ]; then
log_err "Source configuration file not found: $DROP_IN_SOURCE"
exit 1
fi
# Check if NFS mounts exist
log_info "Checking NFS mount configuration..."
if ! systemctl list-units --type=mount | grep -q "nfs-azerothcore.mount"; then
log_warn "nfs-azerothcore.mount not found. This fix requires NFS mounts to be configured."
log_warn "Continue anyway? (y/n)"
read -r response
if [[ ! "$response" =~ ^[Yy]$ ]]; then
log_info "Installation cancelled."
exit 0
fi
fi
# Create drop-in directory
log_info "Creating systemd drop-in directory..."
mkdir -p "$(dirname "$DROP_IN_TARGET")"
log_ok "Drop-in directory ready: $(dirname "$DROP_IN_TARGET")"
# Install configuration file
log_info "Installing NFS dependencies configuration..."
cp "$DROP_IN_SOURCE" "$DROP_IN_TARGET"
chmod 644 "$DROP_IN_TARGET"
log_ok "Configuration installed: $DROP_IN_TARGET"
# Show what was installed
echo ""
log_info "Installed configuration:"
echo "---"
cat "$DROP_IN_TARGET"
echo "---"
echo ""
# Reload systemd
log_info "Reloading systemd daemon..."
systemctl daemon-reload
log_ok "Systemd daemon reloaded"
# Verify configuration
log_info "Verifying Docker service dependencies..."
echo ""
systemctl show -p After,Requires,Wants docker.service | grep -E '^(After|Requires|Wants)='
echo ""
# Check if Docker is running
if systemctl is-active --quiet docker.service; then
log_warn "Docker is currently running"
log_warn "The new configuration will take effect on next Docker restart or system reboot"
echo ""
log_info "To apply immediately, restart Docker (WARNING: will stop all containers):"
echo " sudo systemctl restart docker.service"
echo ""
log_info "Or reboot the system:"
echo " sudo reboot"
else
log_ok "Docker is not running - configuration will apply on next start"
fi
echo ""
log_ok "Docker NFS dependencies fix installed successfully!"
log_info "Docker will now wait for NFS mounts before starting"
log_info "This prevents backup folders from being deleted during server restarts"

View File

@@ -144,6 +144,7 @@ Options:
--port PORT SSH port (default: 22)
--identity PATH SSH private key (passed to scp/ssh)
--project-dir DIR Remote project directory (default: ~/<project-name>)
--setup-source 0|1 Auto-setup AzerothCore source on remote (0=skip, 1=setup, unset=prompt)
--env-file PATH Use this env file for image lookup and upload (default: ./.env)
--tarball PATH Output path for the image tar (default: ./local-storage/images/acore-modules-images.tar)
--storage PATH Remote storage directory (default: <project-dir>/storage)
@@ -168,6 +169,7 @@ SKIP_STORAGE=0
ASSUME_YES=0
COPY_SOURCE=0
SKIP_ENV=0
REMOTE_SETUP_SOURCE=""
PRESERVE_CONTAINERS=0
CLEAN_CONTAINERS=0
@@ -181,6 +183,7 @@ while [[ $# -gt 0 ]]; do
--env-file) ENV_FILE="$2"; shift 2;;
--tarball) TARBALL="$2"; shift 2;;
--storage) REMOTE_STORAGE="$2"; shift 2;;
--setup-source) REMOTE_SETUP_SOURCE="$2"; shift 2;;
--skip-storage) SKIP_STORAGE=1; shift;;
--skip-env) SKIP_ENV=1; shift;;
--preserve-containers) PRESERVE_CONTAINERS=1; shift;;
@@ -253,7 +256,15 @@ STAGE_SQL_PATH_RAW="$(read_env_value STAGE_PATH_MODULE_SQL "${LOCAL_STORAGE_ROOT
if [ -z "${STORAGE_PATH_LOCAL:-}" ]; then
STORAGE_PATH_LOCAL="$LOCAL_STORAGE_ROOT"
fi
-# Expand any env references (e.g., ${STORAGE_PATH_LOCAL})
+# Ensure STORAGE_PATH is defined to avoid set -u failures during expansion
if [ -z "${STORAGE_PATH:-}" ]; then
STORAGE_PATH="$(read_env_value STORAGE_PATH "./storage")"
fi
# Ensure STORAGE_MODULE_SQL_PATH is defined to avoid set -u failures during expansion
if [ -z "${STORAGE_MODULE_SQL_PATH:-}" ]; then
STORAGE_MODULE_SQL_PATH="$(read_env_value STORAGE_MODULE_SQL_PATH "${STORAGE_PATH}/module-sql-updates")"
fi
# Expand any env references (e.g., ${STORAGE_PATH_LOCAL}, ${STORAGE_MODULE_SQL_PATH})
STAGE_SQL_PATH_RAW="$(eval "echo \"$STAGE_SQL_PATH_RAW\"")"
LOCAL_STAGE_SQL_DIR="$(resolve_path_relative_to_project "$STAGE_SQL_PATH_RAW" "$PROJECT_ROOT")"
REMOTE_STAGE_SQL_DIR="$(resolve_path_relative_to_project "$STAGE_SQL_PATH_RAW" "$PROJECT_DIR")"
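Editor's note: the guards added above use the standard `set -u`-safe idiom: test with `${VAR:-}` so an unset variable expands to an empty string instead of aborting the script, then fall back to a default. A tiny generic sketch of the pattern (not the repo's `read_env_value` helper):

```bash
#!/usr/bin/env bash
set -euo pipefail   # -u makes references to unset variables fatal

# "${STORAGE_PATH:-}" expands to "" when STORAGE_PATH is unset, so the test is safe under set -u.
if [ -z "${STORAGE_PATH:-}" ]; then
  STORAGE_PATH="./storage"   # fall back to a default instead of failing later
fi
echo "Using storage path: $STORAGE_PATH"
```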
@@ -438,6 +449,76 @@ setup_remote_repository(){
echo " • Repository synchronized ✓"
}
setup_source_if_needed(){
local should_setup="${REMOTE_SETUP_SOURCE:-}"
# Check if source already exists and is populated
echo " • Checking for existing AzerothCore source repository..."
if run_ssh "[ -d '$PROJECT_DIR/local-storage/source/azerothcore-playerbots/data/sql/base/db_world' ] && [ -n \"\$(ls -A '$PROJECT_DIR/local-storage/source/azerothcore-playerbots/data/sql/base/db_world' 2>/dev/null)\" ]" 2>/dev/null; then
echo " ✅ Source repository already populated on remote"
return 0
elif run_ssh "[ -d '$PROJECT_DIR/local-storage/source/azerothcore/data/sql/base/db_world' ] && [ -n \"\$(ls -A '$PROJECT_DIR/local-storage/source/azerothcore/data/sql/base/db_world' 2>/dev/null)\" ]" 2>/dev/null; then
echo " ✅ Source repository already populated on remote"
return 0
fi
echo " ⚠️ Source repository not found or empty on remote"
# If not set, ask user (unless --yes)
if [ -z "$should_setup" ]; then
if [ "$ASSUME_YES" = "1" ]; then
# Auto-yes in non-interactive: default to YES for safety
echo " Auto-confirming source setup (--yes flag)"
should_setup=1
else
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "📦 AzerothCore Source Repository Setup"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""
echo "The remote server needs AzerothCore source code for database schemas."
echo "This will clone ~2GB repository (one-time operation, takes 2-5 minutes)."
echo ""
echo "Without this, database initialization will FAIL."
echo ""
read -rp "Set up source repository now? [Y/n]: " answer
answer="${answer:-Y}"
case "${answer,,}" in
y|yes) should_setup=1 ;;
*) should_setup=0 ;;
esac
fi
fi
if [ "$should_setup" != "1" ]; then
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "⚠️ WARNING: Source setup skipped"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""
echo "You MUST run this manually on the remote host BEFORE starting services:"
echo ""
echo " ssh $USER@$HOST"
echo " cd $PROJECT_DIR"
echo " ./scripts/bash/setup-source.sh"
echo ""
return 0
fi
echo " 🔧 Setting up AzerothCore source repository on remote..."
echo " ⏳ Cloning AzerothCore (this may take 2-5 minutes)..."
# Run setup-source.sh on remote, capturing output
if run_ssh "cd '$PROJECT_DIR' && ./scripts/bash/setup-source.sh" 2>&1 | sed 's/^/ /'; then
echo " ✅ Source repository setup complete"
return 0
else
echo " ❌ Source setup failed (check output above for details)"
echo " ⚠️ Run manually: ssh $USER@$HOST 'cd $PROJECT_DIR && ./scripts/bash/setup-source.sh'"
return 1
fi
}
cleanup_stale_docker_resources(){
if [ "$PRESERVE_CONTAINERS" -eq 1 ]; then
echo "⋅ Skipping remote container/image cleanup (--preserve-containers)"
@@ -469,6 +550,9 @@ cleanup_stale_docker_resources(){
validate_remote_environment
# Set up source repository if needed (after project files are synced)
setup_source_if_needed || true # Don't fail entire deployment if source setup fails
collect_deploy_image_refs
echo "⋅ Exporting deployment images to $TARBALL"

View File

@@ -0,0 +1,177 @@
#!/bin/bash
# Setup user environment with sudo access and bash completion
set -e
# Colors
GREEN='\033[0;32m'
BLUE='\033[0;34m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color
log_info() { echo -e "${BLUE} $*${NC}"; }
log_ok() { echo -e "${GREEN}$*${NC}"; }
log_warn() { echo -e "${YELLOW}⚠️ $*${NC}"; }
TARGET_USER="${1:-${USER}}"
# Check if running as root
if [ "$EUID" -ne 0 ]; then
echo "This script must be run as root (use sudo)"
exit 1
fi
echo ""
log_info "Setting up environment for user: $TARGET_USER"
echo ""
# 1. Add user to sudo group
log_info "Step 1/4: Adding $TARGET_USER to sudo group..."
if groups "$TARGET_USER" | grep -q "\bsudo\b"; then
log_ok "User already in sudo group"
else
usermod -aG sudo "$TARGET_USER"
log_ok "Added $TARGET_USER to sudo group"
fi
# 2. Change default shell to bash
log_info "Step 2/4: Setting default shell to bash..."
CURRENT_SHELL=$(getent passwd "$TARGET_USER" | cut -d: -f7)
if [ "$CURRENT_SHELL" = "/bin/bash" ]; then
log_ok "Default shell already set to bash"
else
chsh -s /bin/bash "$TARGET_USER"
log_ok "Changed default shell from $CURRENT_SHELL to /bin/bash"
fi
# 3. Create .bashrc with bash completion
log_info "Step 3/4: Setting up bash completion..."
USER_HOME=$(getent passwd "$TARGET_USER" | cut -d: -f6)
BASHRC="$USER_HOME/.bashrc"
if [ -f "$BASHRC" ]; then
log_warn ".bashrc already exists, checking for bash completion..."
if grep -q "bash_completion" "$BASHRC"; then
log_ok "Bash completion already configured in .bashrc"
else
log_info "Adding bash completion to existing .bashrc..."
cat >> "$BASHRC" << 'EOF'
# Enable bash completion
if ! shopt -oq posix; then
if [ -f /usr/share/bash-completion/bash_completion ]; then
. /usr/share/bash-completion/bash_completion
elif [ -f /etc/bash_completion ]; then
. /etc/bash_completion
fi
fi
EOF
chown "$TARGET_USER:$TARGET_USER" "$BASHRC"
log_ok "Bash completion added to .bashrc"
fi
else
log_info "Creating new .bashrc with bash completion..."
cat > "$BASHRC" << 'EOF'
# ~/.bashrc: executed by bash(1) for non-login shells.
# If not running interactively, don't do anything
case $- in
*i*) ;;
*) return;;
esac
# History settings
HISTCONTROL=ignoreboth
HISTSIZE=10000
HISTFILESIZE=20000
shopt -s histappend
# Check window size after each command
shopt -s checkwinsize
# Make less more friendly for non-text input files
[ -x /usr/bin/lesspipe ] && eval "$(SHELL=/bin/sh lesspipe)"
# Set a fancy prompt
PS1='\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ '
# Enable color support for ls and grep
if [ -x /usr/bin/dircolors ]; then
test -r ~/.dircolors && eval "$(dircolors -b ~/.dircolors)" || eval "$(dircolors -b)"
alias ls='ls --color=auto'
alias grep='grep --color=auto'
alias fgrep='fgrep --color=auto'
alias egrep='egrep --color=auto'
fi
# Some more ls aliases
alias ll='ls -alF'
alias la='ls -A'
alias l='ls -CF'
# Enable bash completion
if ! shopt -oq posix; then
if [ -f /usr/share/bash-completion/bash_completion ]; then
. /usr/share/bash-completion/bash_completion
elif [ -f /etc/bash_completion ]; then
. /etc/bash_completion
fi
fi
# Docker completion (if docker is installed)
if [ -f /usr/share/bash-completion/completions/docker ]; then
. /usr/share/bash-completion/completions/docker
fi
EOF
chown "$TARGET_USER:$TARGET_USER" "$BASHRC"
chmod 644 "$BASHRC"
log_ok "Created .bashrc with bash completion"
fi
# 4. Create .bash_profile to source .bashrc for login shells
log_info "Step 4/4: Setting up bash_profile for login shells..."
BASH_PROFILE="$USER_HOME/.bash_profile"
if [ -f "$BASH_PROFILE" ]; then
if grep -q "\.bashrc" "$BASH_PROFILE"; then
log_ok ".bash_profile already sources .bashrc"
else
log_info "Adding .bashrc sourcing to existing .bash_profile..."
cat >> "$BASH_PROFILE" << 'EOF'
# Source .bashrc if it exists
if [ -f ~/.bashrc ]; then
. ~/.bashrc
fi
EOF
chown "$TARGET_USER:$TARGET_USER" "$BASH_PROFILE"
log_ok ".bash_profile updated to source .bashrc"
fi
else
log_info "Creating .bash_profile..."
cat > "$BASH_PROFILE" << 'EOF'
# ~/.bash_profile: executed by bash(1) for login shells.
# Source .bashrc if it exists
if [ -f ~/.bashrc ]; then
. ~/.bashrc
fi
EOF
chown "$TARGET_USER:$TARGET_USER" "$BASH_PROFILE"
chmod 644 "$BASH_PROFILE"
log_ok "Created .bash_profile"
fi
echo ""
log_ok "Environment setup complete for $TARGET_USER!"
echo ""
echo "Changes applied:"
echo " ✓ Added to sudo group (password required)"
echo " ✓ Default shell changed to /bin/bash"
echo " ✓ Bash completion enabled (.bashrc)"
echo " ✓ Login shell configured (.bash_profile)"
echo ""
log_warn "Important: You need to log out and log back in for shell changes to take effect"
log_info "To test sudo: sudo -v (will prompt for password)"
log_info "To test tab completion: type 'systemctl rest' and press TAB"
log_info "To verify shell: echo \$SHELL (should show /bin/bash)"
echo ""

View File

@@ -0,0 +1,80 @@
#!/bin/bash
# Updates the realmlist table in the database with current SERVER_ADDRESS and REALM_PORT from .env
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
ROOT_DIR="$(cd "$SCRIPT_DIR/../.." && pwd)"
# Source colors and functions
BLUE='\033[0;34m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m'
info() { printf '%b\n' "${BLUE} $*${NC}"; }
ok() { printf '%b\n' "${GREEN}$*${NC}"; }
warn() { printf '%b\n' "${YELLOW}⚠️ $*${NC}"; }
err() { printf '%b\n' "${RED}$*${NC}"; }
# Load environment variables from .env
if [ -f "$ROOT_DIR/.env" ]; then
# shellcheck disable=SC1091
set -a
source "$ROOT_DIR/.env"
set +a
else
err "No .env file found at $ROOT_DIR/.env"
exit 1
fi
# Check required variables
if [ -z "$SERVER_ADDRESS" ]; then
err "SERVER_ADDRESS not set in .env"
exit 1
fi
if [ -z "$REALM_PORT" ]; then
err "REALM_PORT not set in .env"
exit 1
fi
if [ -z "$MYSQL_HOST" ]; then
err "MYSQL_HOST not set in .env"
exit 1
fi
if [ -z "$MYSQL_USER" ]; then
err "MYSQL_USER not set in .env"
exit 1
fi
if [ -z "$MYSQL_ROOT_PASSWORD" ]; then
err "MYSQL_ROOT_PASSWORD not set in .env"
exit 1
fi
if [ -z "$DB_AUTH_NAME" ]; then
err "DB_AUTH_NAME not set in .env"
exit 1
fi
info "Updating realmlist table..."
info " Address: $SERVER_ADDRESS"
info " Port: $REALM_PORT"
# Try to update the database
if mysql -h "${MYSQL_HOST}" -u"${MYSQL_USER}" -p"${MYSQL_ROOT_PASSWORD}" --skip-ssl-verify "${DB_AUTH_NAME}" \
-e "UPDATE realmlist SET address='${SERVER_ADDRESS}', port=${REALM_PORT} WHERE id=1;" 2>/dev/null; then
ok "Realmlist updated successfully"
# Show the current realmlist entry
mysql -h "${MYSQL_HOST}" -u"${MYSQL_USER}" -p"${MYSQL_ROOT_PASSWORD}" --skip-ssl-verify "${DB_AUTH_NAME}" \
-e "SELECT id, name, address, port FROM realmlist WHERE id=1;" 2>/dev/null || true
exit 0
else
warn "Could not update realmlist table"
warn "This is normal if the database is not yet initialized or MySQL is not running"
exit 1
fi