Mirror of https://github.com/uprightbass360/AzerothCore-RealmMaster.git
Synced 2026-01-13 00:58:34 +00:00

Commit: tagging updates
.gitignore (vendored): 3 lines changed
@@ -8,9 +8,10 @@ local-data-tools/
 storage/
 source/
 .claude/
-.env
+images/
 node_modules/
 .mcp*/
+.env
 package-lock.json
 package.json
README.md: 28 lines changed
@@ -224,12 +224,40 @@ Use this workflow to build locally, then push the same stack to a remote host:
 docker compose --profile services-playerbots logs --tail 100 ac-worldserver
 ```
 
+### Remote Deploy Workflow
+1. **Configure & Build Locally**
+   ```bash
+   ./setup.sh --module-config sam --playerbot-max-bots 3000
+   ./scripts/rebuild-with-modules.sh --yes
+   ```
+2. **Migrate Stack to Remote**
+   ```bash
+   ./scripts/migrate-stack.sh \
+     --host docker-server \
+     --user sam \
+     --project-dir /home/sam/src/acore-compose
+   ```
+   (Exports rebuilt images to `images/acore-modules-images.tar`, including both `acore/...:modules-latest` and `uprightbass360/...:Playerbot` tags, then syncs `storage/` unless `--skip-storage` is provided.)
+3. **Deploy on Remote Host**
+   ```bash
+   ssh docker-server '
+     cd /home/sam/src/acore-compose &&
+     ./deploy.sh --skip-rebuild --no-watch
+   '
+   ```
+4. **Verify Services**
+   ```bash
+   ./status.sh --once
+   docker compose --profile services-playerbots logs --tail 100 ac-worldserver
+   ```
 
 ### Module Presets
 - Drop comma-separated module lists into `configurations/*.conf` (for example `configurations/playerbot-modules.conf`).
 - `setup.sh` automatically adds these presets to the module menu and enables the listed modules when selected or when `--module-config <name>` is provided.
 - Built-in presets:
   - `configurations/suggested-modules.conf` – default solo-friendly QoL stack.
   - `configurations/playerbots-suggested-modules.conf` – suggested stack plus playerbots.
+  - `configurations/playerbot-only.conf` – playerbot-focused profile (adjust `--playerbot-max-bots`).
 - Custom example:
   - `configurations/sam.conf` – Sam's playerbot-focused profile (set `--playerbot-max-bots 3000` when using this preset).
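Side note on step 2 of the workflow above (not part of the commit): before running `./scripts/migrate-stack.sh`, it can help to confirm that the image tags the script exports actually exist locally. A minimal check, assuming the tag names shown in this diff:

```bash
# Sanity check (illustrative, not part of this commit): list the tags that
# migrate-stack.sh will try to export. Empty output means the rebuild/tagging
# step has not produced them yet.
docker image ls --filter 'reference=acore/ac-wotlk-*:modules-latest'
docker image ls --filter 'reference=uprightbass360/azerothcore-wotlk-playerbots:*-Playerbot'
```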
configurations/playerbot-only.conf (new file): 1 line
@@ -0,0 +1 @@
+MODULE_PLAYERBOTS,MODULE_PLAYER_BOT_LEVEL_BRACKETS,MODULE_SOLO_LFG,MODULE_TRANSMOG,MODULE_NPC_BUFFER,MODULE_LEARN_SPELLS,MODULE_FIREWORKS,MODULE_AUTOBALANCE,MODULE_REAGENT_BANK,MODULE_BLACK_MARKET_AUCTION_HOUSE,MODULE_1V1_ARENA,MODULE_ACCOUNT_ACHIEVEMENTS,MODULE_BREAKING_NEWS,MODULE_BOSS_ANNOUNCER,MODULE_AUTO_REVIVE,MODULE_ELUNA_TS,MODULE_RANDOM_ENCHANTS,MODULE_POCKET_PORTAL,MODULE_INSTANCE_RESET,MODULE_TIME_IS_TIME,MODULE_GAIN_HONOR_GUARD,MODULE_ARAC,MODULE_DUNGEON_RESPAWN,MODULE_NPC_ENCHANTER
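For reference (not part of the commit): since `setup.sh` picks presets up from `configurations/*.conf`, this new file would presumably be selected the same way the `sam` preset is in the README example, with the preset name assumed to come from the file name:

```bash
# Illustrative only: assumes the preset name is derived from the file name,
# as it is for configurations/sam.conf in the README example.
./setup.sh --module-config playerbot-only --playerbot-max-bots 3000
```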
scripts/migrate-stack.sh
@@ -1,12 +1,12 @@
 #!/bin/bash
 
-# Utility to migrate the current acore-compose stack to a remote host.
-# It assumes the module images have already been rebuilt locally.
+# Utility to migrate module images (and optionally storage) to a remote host.
+# Assumes module images have already been rebuilt locally.
 
 set -euo pipefail
 
 usage(){
-cat <<EOF
+cat <<'EOF_HELP'
 Usage: $(basename "$0") --host HOST --user USER [options]
 
 Options:
@@ -14,16 +14,12 @@ Options:
   --user USER        SSH username on remote host (required)
   --port PORT        SSH port (default: 22)
   --identity PATH    SSH private key (passed to scp/ssh)
-  --project-dir DIR  Remote directory for the project (default: ~/acore-compose)
-  --tarball PATH     Output path for the image tar (default: ./acore-modules-images.tar)
+  --project-dir DIR  Remote project directory (default: ~/acore-compose)
+  --tarball PATH     Output path for the image tar (default: ./images/acore-modules-images.tar)
   --storage PATH     Remote storage directory (default: <project-dir>/storage)
-  --skip-images      Do not export/import Docker images
+  --skip-storage     Do not sync the storage directory
   --help             Show this help
-
-Example:
-  $(basename "$0") --host wow.example.com --user deploy --identity ~/.ssh/id_ed25519 \
-    --project-dir /opt/acore-compose
-EOF
+EOF_HELP
 }
@@ -33,7 +29,7 @@ IDENTITY=""
 PROJECT_DIR=""
 TARBALL=""
 REMOTE_STORAGE=""
-SKIP_IMAGES=0
+SKIP_STORAGE=0
 
 while [[ $# -gt 0 ]]; do
   case "$1" in
@@ -44,11 +40,11 @@ while [[ $# -gt 0 ]]; do
     --project-dir) PROJECT_DIR="$2"; shift 2;;
     --tarball) TARBALL="$2"; shift 2;;
     --storage) REMOTE_STORAGE="$2"; shift 2;;
-    --skip-images) SKIP_IMAGES=1; shift;;
+    --skip-storage) SKIP_STORAGE=1; shift;;
     --help|-h) usage; exit 0;;
     *) echo "Unknown option: $1" >&2; usage; exit 1;;
   esac
 done
 
 if [[ -z "$HOST" || -z "$USER" ]]; then
   echo "--host and --user are required" >&2
@@ -58,7 +54,7 @@ fi
 
 PROJECT_DIR="${PROJECT_DIR:-/home/${USER}/acore-compose}"
 REMOTE_STORAGE="${REMOTE_STORAGE:-${PROJECT_DIR}/storage}"
-TARBALL="${TARBALL:-$(pwd)/acore-modules-images.tar}"
+TARBALL="${TARBALL:-$(pwd)/images/acore-modules-images.tar}"
 
 SCP_OPTS=(-P "$PORT")
 SSH_OPTS=(-p "$PORT")
@@ -75,40 +71,36 @@ run_scp(){
   scp "${SCP_OPTS[@]}" "$@"
 }
 
-echo "⋅ Preparing project archive"
-TMP_PROJECT_ARCHIVE="$(mktemp -u acore-compose-XXXXXX.tar.gz)"
-tar --exclude '.git' --exclude 'storage/backups' --exclude 'storage/logs' \
-  --exclude 'acore-modules-images.tar' -czf "$TMP_PROJECT_ARCHIVE" -C "$(pwd)/.." "$(basename "$(pwd)")"
+echo "⋅ Exporting module images to $TARBALL"
+mkdir -p "$(dirname "$TARBALL")"
+IMAGES_TO_SAVE=(
+  acore/ac-wotlk-worldserver:modules-latest
+  acore/ac-wotlk-authserver:modules-latest
+)
+if docker image inspect uprightbass360/azerothcore-wotlk-playerbots:worldserver-Playerbot >/dev/null 2>&1; then
+  IMAGES_TO_SAVE+=(uprightbass360/azerothcore-wotlk-playerbots:worldserver-Playerbot)
+fi
+if docker image inspect uprightbass360/azerothcore-wotlk-playerbots:authserver-Playerbot >/dev/null 2>&1; then
+  IMAGES_TO_SAVE+=(uprightbass360/azerothcore-wotlk-playerbots:authserver-Playerbot)
+fi
+docker image save "${IMAGES_TO_SAVE[@]}" > "$TARBALL"
 
-if [[ $SKIP_IMAGES -eq 0 ]]; then
-  echo "⋅ Exporting module images to $TARBALL"
-  docker image save \
-    acore/ac-wotlk-worldserver:modules-latest \
-    acore/ac-wotlk-authserver:modules-latest \
-    > "$TARBALL"
+if [[ $SKIP_STORAGE -eq 0 ]]; then
+  if [[ -d storage ]]; then
+    echo "⋅ Syncing storage to remote"
+    run_ssh "mkdir -p '$REMOTE_STORAGE'"
+    find storage -mindepth 1 -maxdepth 1 -print0 | xargs -0 -I{} scp "${SCP_OPTS[@]}" -r '{}' "$USER@$HOST:$REMOTE_STORAGE/"
+  else
+    echo "⋅ Skipping storage sync (storage/ missing)"
+  fi
+else
+  echo "⋅ Skipping storage sync"
 fi
 
-echo "⋅ Removing rebuild sentinel"
-rm -f storage/modules/.requires_rebuild || true
-
-echo "⋅ Syncing project to remote $USER@$HOST:$PROJECT_DIR"
-run_ssh "mkdir -p '$PROJECT_DIR'"
-run_scp "$TMP_PROJECT_ARCHIVE" "$USER@$HOST:/tmp/acore-compose.tar.gz"
-run_ssh "tar -xzf /tmp/acore-compose.tar.gz -C '$PROJECT_DIR' --strip-components=1 && rm /tmp/acore-compose.tar.gz"
-
-echo "⋅ Syncing storage to remote"
-run_ssh "mkdir -p '$REMOTE_STORAGE'"
-run_scp -r storage/* "$USER@$HOST:$REMOTE_STORAGE/"
-
-if [[ $SKIP_IMAGES -eq 0 ]]; then
-  echo "⋅ Transferring docker images"
-  run_scp "$TARBALL" "$USER@$HOST:/tmp/acore-modules-images.tar"
-  run_ssh "docker load < /tmp/acore-modules-images.tar && rm /tmp/acore-modules-images.tar"
-fi
+echo "⋅ Loading images on remote"
+run_scp "$TARBALL" "$USER@$HOST:/tmp/acore-modules-images.tar"
+run_ssh "docker load < /tmp/acore-modules-images.tar && rm /tmp/acore-modules-images.tar"
 
 echo "⋅ Remote prepares completed"
-echo "Run the following on the remote host to deploy:"
+echo "Run on the remote host to deploy:"
 echo " cd $PROJECT_DIR && ./deploy.sh --skip-rebuild --no-watch"
-
-rm -f "$TMP_PROJECT_ARCHIVE"
-echo "Migration script finished"
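Since the rewritten usage text drops the old Example block, here is an illustrative invocation assembled from the options above (host, user, and paths are placeholders, not values from this repo):

```bash
# Illustrative only: all values below are placeholders.
./scripts/migrate-stack.sh \
  --host wow.example.com \
  --user deploy \
  --identity ~/.ssh/id_ed25519 \
  --project-dir /opt/acore-compose \
  --skip-storage    # omit this flag to also sync storage/
```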
scripts/rebuild-with-modules.sh
@@ -197,6 +197,16 @@ fi
 echo "🚀 Building AzerothCore with modules..."
 docker compose build --no-cache
 
+echo "🔖 Tagging modules-latest images"
+docker tag acore/ac-wotlk-worldserver:master acore/ac-wotlk-worldserver:modules-latest
+docker tag acore/ac-wotlk-authserver:master acore/ac-wotlk-authserver:modules-latest
+
+if [ "$(read_env MODULE_PLAYERBOTS "0")" = "1" ]; then
+  echo "🔁 Tagging playerbot images uprightbass360/azerothcore-wotlk-playerbots:*"
+  docker tag acore/ac-wotlk-worldserver:modules-latest uprightbass360/azerothcore-wotlk-playerbots:worldserver-Playerbot
+  docker tag acore/ac-wotlk-authserver:modules-latest uprightbass360/azerothcore-wotlk-playerbots:authserver-Playerbot
+fi
+
 show_rebuild_step 5 5 "Cleaning up build containers"
 echo "🧹 Cleaning up source build containers..."
 docker compose down --remove-orphans >/dev/null 2>&1 || true
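Note on the tagging block above (not part of the commit): it gates the playerbot tags on a `read_env` helper that is defined elsewhere in rebuild-with-modules.sh and is not shown in this hunk. A minimal sketch of what such a helper typically does, purely as an assumption about its behavior:

```bash
# Assumed shape of the read_env helper used above; the real definition lives
# elsewhere in the script and may differ.
# read_env KEY DEFAULT -> prints KEY's value from .env, or DEFAULT when unset.
read_env(){
  local key="$1" default="${2:-}"
  local value
  value="$(grep -E "^${key}=" .env 2>/dev/null | tail -n1 | cut -d= -f2-)"
  echo "${value:-$default}"
}
```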