Merge branch 'master' into Playerbot

This commit is contained in:
Yunfan Li
2025-06-29 10:55:43 +08:00
18 changed files with 409 additions and 602 deletions

View File

@@ -1,234 +0,0 @@
########################################################################################
# SETTINGS #
########################################################################################
$mysql_host = "127.0.0.1"
$mysql_user = "export"
$mysql_password = "export"
$mysql_database_auth = "acore_auth"
$mysql_database_characters = "acore_characters"
$mysql_database_world = "acore_world"
########################################################################################
# SETTINGS END #
########################################################################################
# Set MySQL password as temporary env var
$env:MYSQL_PWD = $mysql_password
# Get the path to the sql\base directory
$scriptDirectory = $PSScriptRoot
$relativePath = "..\..\data\sql\base"
$combinedPath = Join-Path -Path $scriptDirectory -ChildPath $relativePath
$fullPath = Resolve-Path -Path $combinedPath
# Define the output directory (using database name)
$output_directory_auth = "$fullPath\db_auth"
$output_directory_characters = "$fullPath\db_characters"
$output_directory_world = "$fullPath\db_world"
Write-Host " ___ _ _ ___ "
Write-Host "/ \ ___ ___ _ _ ___ | |_ | |_ / __| ___ _ _ ___ "
Write-Host "| - ||_ // -_)| '_|/ _ \| _|| \ | (__ / _ \| '_|/ -_)"
Write-Host "|_|_|/__|\___||_| \___/ \__||_||_| \___|\___/|_| \___|"
Write-Host "AzerothCore 3.3.5a - www.azerothcore.org"
Write-Host ""
Write-Host "Welcome to the AzerothCore Database Exporter for database squashes!"
Write-Host ""
Write-Host "You have configured:"
Write-Host "Database Auth: '$mysql_database_auth'"
Write-Host "Database Characters: '$mysql_database_characters'"
Write-Host "Database World: '$mysql_database_world'"
Write-Host "Output Dir Auth: '$output_directory_auth'"
Write-Host "Output Dir Characters: '$output_directory_characters'"
Write-Host "Output Dir World: '$output_directory_world'"
Write-Host ""
Write-Host "Make sure you read the entire process before you continue."
Write-Host "https://github.com/azerothcore/azerothcore-wotlk/blob/master/data/sql/base/database-squash.md"
Write-Host "https://github.com/azerothcore/azerothcore-wotlk/blob/master/apps/DatabaseExporter/databaseexporter.md"
Write-Host ""
# Check if the user wants to continue using the tool
do {
$confirmation = Read-Host "Do you want to continue using the tool? (Y/N)"
if ($confirmation -eq 'Y' -or $confirmation -eq 'y') {
# Continue the script
Write-Host "AzerothCore Database Exporter starts."
$continue = $true
}
elseif ($confirmation -eq 'N' -or $confirmation -eq 'n') {
# Exit the script
Write-Host "Exiting the AzerothCore Database Exporter."
exit
}
else {
Write-Host "Invalid input. Please enter Y or N."
$continue = $null
}
} while ($continue -eq $null)
# Remove the output directories if they exist
if (Test-Path $output_directory_auth) {
Remove-Item -Path $output_directory_auth -Recurse -Force
Write-Host "Deleted directory $output_directory_auth"
}
if (Test-Path $output_directory_characters) {
Remove-Item -Path $output_directory_characters -Recurse -Force
Write-Host "Deleted directory $output_directory_characters"
}
if (Test-Path $output_directory_world) {
Remove-Item -Path $output_directory_world -Recurse -Force
Write-Host "Deleted directory $output_directory_world"
}
# Create the output directories if they don't exist
if (-not (Test-Path -Path $output_directory_auth)) {
New-Item -ItemType Directory -Force -Path $output_directory_auth
Write-Host "Created directory $output_directory_auth"
}
if (-not (Test-Path -Path $output_directory_characters)) {
New-Item -ItemType Directory -Force -Path $output_directory_characters
Write-Host "Created directory $output_directory_characters"
}
if (-not (Test-Path -Path $output_directory_world)) {
New-Item -ItemType Directory -Force -Path $output_directory_world
Write-Host "Created directory $output_directory_world"
}
# Fix for dumping TIMESTAMP data
$timezone = "+01:00"
$mysqlCommand = "SET time_zone = '$timezone';"
$mysqlExec = "mysql -h $mysql_host -u $mysql_user -p$mysql_password -e `"$mysqlCommand`""
Invoke-Expression -Command $mysqlExec
# PS script uses non-utf-8 encoding by default
# https://stackoverflow.com/a/58438716
# Save the current encoding and switch to UTF-8.
$prev = [Console]::OutputEncoding
[Console]::OutputEncoding = [System.Text.UTF8Encoding]::new()
Write-Host ""
Write-Host "#########################################################"
Write-Host "EXPORT AUTH DATABASE START"
Write-Host "#########################################################"
Write-Host ""
Write-Host "Please enter your password for user '$mysql_user'"
# Export Auth Database
# Connect to MySQL and get all the tables
$tables_auth = mysql -h $mysql_host -u $mysql_user -D $mysql_database_auth -e "SHOW TABLES;" | Select-Object -Skip 1
# Iterate through each table and export both the structure and contents into the same SQL file
foreach ($table in $tables_auth) {
# Define the output file path for this table
$output_file = "$output_directory_auth\$table.sql"
# Clear the content of the output file if it exists, or create a new one
if (Test-Path $output_file) {
Clear-Content -Path $output_file
}
# Export the table structure (CREATE TABLE) and table data (INSERT) to the SQL file
$create_table_command = "mysqldump -h $mysql_host -u $mysql_user --skip-tz-utc $mysql_database_auth $table"
$create_table_output = Invoke-Expression -Command $create_table_command
# write file with utf-8 encoding
# https://stackoverflow.com/a/32951824
[IO.File]::WriteAllLines($output_file, $create_table_output)
# Format the INSERT values to be on separate lines.
$content = Get-Content -Raw $output_file
$formattedContent = $content -replace 'VALUES \(', "VALUES`r`n("
$formattedContent = $formattedContent -replace '\),', "),`r`n"
$formattedContent | Set-Content $output_file
Write-Host "Exported structure and data for table $table to $output_file"
}
Write-Host ""
Write-Host "#########################################################"
Write-Host "EXPORT AUTH DATABASE END"
Write-Host "#########################################################"
Write-Host ""
Write-Host "#########################################################"
Write-Host "EXPORT CHARACTERS DATABASE START"
Write-Host "#########################################################"
Write-Host ""
Write-Host "Please enter your password for user '$mysql_user'"
# Export Characters Database
# Connect to MySQL and get all the tables
$tables_characters = mysql -h $mysql_host -u $mysql_user -D $mysql_database_characters -e "SHOW TABLES;" | Select-Object -Skip 1
# Iterate through each table and export both the structure and contents into the same SQL file
foreach ($table in $tables_characters) {
# Define the output file path for this table
$output_file = "$output_directory_characters\$table.sql"
# Clear the content of the output file if it exists, or create a new one
if (Test-Path $output_file) {
Clear-Content -Path $output_file
}
# Export the table structure (CREATE TABLE) and table data (INSERT) to the SQL file
$create_table_command = "mysqldump -h $mysql_host -u $mysql_user --skip-tz-utc $mysql_database_characters $table"
$create_table_output = Invoke-Expression -Command $create_table_command
# write file with utf-8 encoding
# https://stackoverflow.com/a/32951824
[IO.File]::WriteAllLines($output_file, $create_table_output)
# Format the INSERT values to be on separate lines.
$content = Get-Content -Raw $output_file
$formattedContent = $content -replace 'VALUES \(', "VALUES`r`n("
$formattedContent = $formattedContent -replace '\),', "),`r`n"
$formattedContent | Set-Content $output_file
Write-Host "Exported structure and data for table $table to $output_file"
}
Write-Host ""
Write-Host "#########################################################"
Write-Host "EXPORT CHARACTERS DATABASE END"
Write-Host "#########################################################"
Write-Host ""
Write-Host "#########################################################"
Write-Host "EXPORT WORLD DATABASE START"
Write-Host "#########################################################"
Write-Host ""
Write-Host "Please enter your password for user '$mysql_user'"
# Export World Database
# Connect to MySQL and get all the tables
$tables_world = mysql -h $mysql_host -u $mysql_user -D $mysql_database_world -e "SHOW TABLES;" | Select-Object -Skip 1
# Iterate through each table and export both the structure and contents into the same SQL file
foreach ($table in $tables_world) {
# Define the output file path for this table
$output_file = "$output_directory_world\$table.sql"
# Clear the content of the output file if it exists, or create a new one
if (Test-Path $output_file) {
Clear-Content -Path $output_file
}
# Export the table structure (CREATE TABLE) and table data (INSERT) to the SQL file
$create_table_command = "mysqldump -h $mysql_host -u $mysql_user --skip-tz-utc $mysql_database_world $table"
$create_table_output = Invoke-Expression -Command $create_table_command
# write file with utf-8 encoding
# https://stackoverflow.com/a/32951824
[IO.File]::WriteAllLines($output_file, $create_table_output)
# Format the INSERT values to be on separate lines.
$content = Get-Content -Raw $output_file
$formattedContent = $content -replace 'VALUES \(', "VALUES`r`n("
$formattedContent = $formattedContent -replace '\),', "),`r`n"
$formattedContent | Set-Content $output_file
Write-Host "Exported structure and data for table $table to $output_file"
}
Write-Host ""
Write-Host "#########################################################"
Write-Host "EXPORT WORLD DATABASE END"
Write-Host "#########################################################"
Write-Host ""
Write-Host "Database Exporter completed."
Write-Host "Have a nice day :)"
# Restore the previous encoding.
[Console]::OutputEncoding = $prev

View File

@@ -1,85 +0,0 @@
# The AzerothCore Database Exporter for Database Squashes
> [!CAUTION]
> These steps are only for project maintainers who intend to update base files.
## Manual setting updates
Open `DatabaseExporter.ps1` with your preferred text editor and update the settings to reflect your setup.
> [!NOTE]
> Only update the settings within the SETTINGS block.
These are the default settings:
```ps
########################################################################################
# SETTINGS #
########################################################################################
$mysql_host = "127.0.0.1"
$mysql_user = "export"
$mysql_password = "export"
$mysql_database_auth = "acore_auth"
$mysql_database_characters = "acore_characters"
$mysql_database_world = "acore_world"
########################################################################################
# SETTINGS END #
########################################################################################
```
## Description of the tool
This tool updates the base files automatically. It must therefore be run from this directory.
This is how it works step-by-step:
1. Check that all paths look correct.
2. Accept to continue using the tool.
3. The tool will delete the `db_auth`, `db_characters`, and `db_world` directories in `..\..\data\sql\base\`.
4. The tool will create the `db_auth`, `db_characters`, and `db_world` directories in `..\..\data\sql\base\`.
5. The tool will export the auth database tables into `..\..\data\sql\base\db_auth\`.
6. The tool will export the characters database tables into `..\..\data\sql\base\db_characters\`.
7. The tool will export the world database tables into `..\..\data\sql\base\db_world\`.
## Run the tool
> [!IMPORTANT]
> This tool CANNOT be moved outside this directory. If you do, it will create files in the wrong places.
1. Make sure you have MySQL installed on your system and that the `mysqldump` tool is reachable through your PATH environment variable. If it is not, you will encounter errors.
- Go into System Variables
- Open the PATH variable
- Add the path to your MySQL Server `bin` directory, e.g. `C:\Program Files\MySQL\MySQL Server 8.4\bin\`
2. If you haven't run PowerShell scripts before, you'll need to adjust the execution policy.
- Open PowerShell as an Administrator.
- Run the following command to allow running scripts:
```ps
Set-ExecutionPolicy RemoteSigned -Scope CurrentUser
```
- This allows scripts to run on your system, but they need to be locally created or downloaded from trusted sources.
3. Open PowerShell (PS)
- Press Win + X and select Windows PowerShell (Admin) / Terminal (Admin)
4. Navigate to the script
- In PS, use the `cd` command to change the directory
```ps
cd "C:\AzerothCore\apps\DatabaseExporter"
```
5. Run the script
- In PS, run the script
```ps
.\DatabaseExporter.ps1
```
6. Follow the instructions given by the tool.
7. Now refer back to the `database-squash.md` instructions (located in `..\..\data\sql\base\`).
Completed :)

View File

@@ -0,0 +1,69 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"
if [[ "$PROJECT_ROOT" =~ ^/([a-zA-Z])/(.*) ]]; then
DRIVE_LETTER="${BASH_REMATCH[1]}"
PATH_REMAINDER="${BASH_REMATCH[2]}"
PROJECT_ROOT="${DRIVE_LETTER^^}:/${PATH_REMAINDER}"
fi
BASE_OUTPUT_DIR="$PROJECT_ROOT/data/sql/base"
read -p "Enter MySQL username: " DB_USER
read -p "Enter MySQL password: " DB_PASS
read -p "Enter MySQL host (default: localhost): " DB_HOST
DB_HOST=${DB_HOST:-localhost}
read -p "Enter MySQL port (default: 3306): " DB_PORT
DB_PORT=${DB_PORT:-3306}
# Prompt for database names
read -p "Enter name of Auth database [default: acore_auth]: " DB_AUTH
DB_AUTH=${DB_AUTH:-acore_auth}
read -p "Enter name of Characters database [default: acore_characters]: " DB_CHARACTERS
DB_CHARACTERS=${DB_CHARACTERS:-acore_characters}
read -p "Enter name of World database [default: acore_world]: " DB_WORLD
DB_WORLD=${DB_WORLD:-acore_world}
# Mapping for folder names
declare -A DB_MAP=(
["$DB_AUTH"]="db_auth"
["$DB_CHARACTERS"]="db_characters"
["$DB_WORLD"]="db_world"
)
# Dump each database
for DB_NAME in "${!DB_MAP[@]}"; do
FOLDER_NAME="${DB_MAP[$DB_NAME]}"
echo "📦 Dumping database '$DB_NAME' into folder '$FOLDER_NAME'"
echo "$BASE_OUTPUT_DIR/$FOLDER_NAME"
mkdir -p "$BASE_OUTPUT_DIR/$FOLDER_NAME"
TABLES=$(mysql -u "$DB_USER" -p"$DB_PASS" -h "$DB_HOST" -P "$DB_PORT" -N -e "SHOW TABLES FROM \`$DB_NAME\`;")
if [[ -z "$TABLES" ]]; then
echo "⚠️ No tables found or failed to connect to '$DB_NAME'. Skipping."
continue
fi
while IFS= read -r raw_table; do
TABLE=$(echo "$raw_table" | tr -d '\r"' | xargs)
if [[ -n "$TABLE" ]]; then
echo " ➤ Dumping table: $TABLE"
mysqldump -u "$DB_USER" -p"$DB_PASS" -h "$DB_HOST" -P "$DB_PORT" --extended-insert "$DB_NAME" "$TABLE" > "$BASE_OUTPUT_DIR/$FOLDER_NAME/$TABLE.sql"
# Reformat the dump so each INSERT row ends up on its own line
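# Example (hypothetical data): "INSERT INTO `account` VALUES (1,'a'),(2,'b');" becomes
#   INSERT INTO `account` VALUES
#   (1,'a'),
#   (2,'b');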
sed -E '
s/VALUES[[:space:]]*/VALUES\n/;
:a
s/\),\(/\),\n\(/g;
ta
' "$BASE_OUTPUT_DIR/$FOLDER_NAME/$TABLE.sql" > "$BASE_OUTPUT_DIR/$FOLDER_NAME/${TABLE}_formatted.sql"
mv "$BASE_OUTPUT_DIR/$FOLDER_NAME/${TABLE}_formatted.sql" "$BASE_OUTPUT_DIR/$FOLDER_NAME/$TABLE.sql"
fi
done <<< "$TABLES"
done
echo "✅ Done dumping all specified databases."

View File

@@ -0,0 +1,16 @@
# The AzerothCore Database Exporter for Database Squashes
> [!CAUTION]
> These steps are only for project maintainers who intend to update base files.
## Requirements
1. MySQL
2. mysqldump
## Usage
1. Run `DatabaseExporter.sh` from this directory (a sketch of a typical run follows this list).
2. Fill in the required data at the CLI prompts.
3. The tool will populate the base file directories under `data/sql/base/`.
4. Done.
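For orientation, here is a minimal sketch of a typical run. The working directory and the username are assumptions (the path follows the reference in `DatabaseSquash.sh`); prompts are abbreviated.
```bash
cd apps/DatabaseSquash/DatabaseExporter   # assumed location of this tool
./DatabaseExporter.sh
# Enter MySQL username: export
# Enter MySQL password:
# Enter MySQL host (default: localhost):
# Enter MySQL port (default: 3306):
# ...
# 📦 Dumping database 'acore_auth' into folder 'db_auth'
#  ➤ Dumping table: account
# One .sql file per table is written to data/sql/base/db_auth/, db_characters/ and db_world/.
```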

View File

@@ -0,0 +1,52 @@
#!/usr/bin/env bash
echo "❗CAUTION"
echo "This tool is only supposed to be used by AzerothCore Maintainers."
echo "The tool is used to prepare for, and generate a database squash."
echo
echo "Before you continue make sure you have read"
echo "https://github.com/azerothcore/azerothcore-wotlk/blob/master/data/sql/base/database-squash.md"
echo
read -p "Are you sure you want to continue (Y/N)?" choice
case "$choice" in
y|Y ) echo "Starting...";;
* ) echo "Aborted"; exit 0 ;;
esac
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
if [[ "$PROJECT_ROOT" =~ ^/([a-zA-Z])/(.*) ]]; then
DRIVE_LETTER="${BASH_REMATCH[1]}"
PATH_REMAINDER="${BASH_REMATCH[2]}"
PROJECT_ROOT="${DRIVE_LETTER^^}:/${PATH_REMAINDER}"
fi
VERSION_UPDATER_PATH="$PROJECT_ROOT/apps/DatabaseSquash/VersionUpdater/versionupdater.sh"
"$VERSION_UPDATER_PATH"
echo "✅ VersionUpdater Completed..."
echo
echo "❗IMPORTANT!"
echo "1. Before you continue you need to drop all your databases."
echo "2. Run WorldServer to populate the database."
echo
echo "❗DO NOT continue before you have completed the steps above!"
echo
echo "The next step will export your database and overwrite the base files."
echo
read -p "Are you sure you want to export your database (Y/N)?" choice
case "$choice" in
y|Y ) echo "Starting...";;
* ) echo "Aborted"; exit 0 ;;
esac
DATABASE_EXPORTER_PATH="$PROJECT_ROOT/apps/DatabaseSquash/DatabaseExporter/databaseexporter.sh"
"$DATABASE_EXPORTER_PATH"
echo "✅ DatabaseExporter Completed..."
echo "✅ DatabaseSquash Completed... "
echo
read -p "Press Enter to exit..."

View File

@@ -0,0 +1,84 @@
#!/bin/bash
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"
if [[ "$PROJECT_ROOT" =~ ^/([a-zA-Z])/(.*) ]]; then
DRIVE_LETTER="${BASH_REMATCH[1]}"
PATH_REMAINDER="${BASH_REMATCH[2]}"
PROJECT_ROOT="${DRIVE_LETTER^^}:/${PATH_REMAINDER}"
fi
ACORE_JSON_PATH="$PROJECT_ROOT/acore.json"
DB_WORLD_UPDATE_DIR="$PROJECT_ROOT/data/sql/updates/db_world"
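# Read the current "version" field from acore.json with plain grep/sed (no JSON parser required)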
VERSION_LINE=$(grep '"version"' "$ACORE_JSON_PATH")
VERSION=$(echo "$VERSION_LINE" | sed -E 's/.*"version": *"([^"]+)".*/\1/')
# Parse version into parts
if [[ "$VERSION" =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)(.*)$ ]]; then
MAJOR="${BASH_REMATCH[1]}"
SUFFIX="${BASH_REMATCH[4]}"
NEW_VERSION="$((MAJOR + 1)).0.0$SUFFIX"
# Replace version in file
sed -i.bak -E "s/(\"version\": *\")[^\"]+(\" *)/\1$NEW_VERSION\2/" "$ACORE_JSON_PATH"
rm -f "$ACORE_JSON_PATH.bak"
echo "✅ Version updated to $NEW_VERSION"
else
echo "Error: Could not parse version string: $VERSION"
exit 1
fi
# Extract the new major version from NEW_VERSION
if [[ "$NEW_VERSION" =~ ^([0-9]+)\. ]]; then
NEW_MAJOR="${BASH_REMATCH[1]}"
else
echo "Error: Unable to extract major version from $NEW_VERSION"
exit 1
fi
# Prepare SQL content
DB_VERSION_CONTENT="'ACDB 335.${NEW_MAJOR}-dev'"
SQL_QUERY="UPDATE \`version\` SET \`db_version\`=${DB_VERSION_CONTENT}, \`cache_id\`=${NEW_MAJOR} LIMIT 1;"
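# Example (hypothetical NEW_MAJOR=12): UPDATE `version` SET `db_version`='ACDB 335.12-dev', `cache_id`=12 LIMIT 1;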
# Format date as yyyy_mm_dd
TODAY=$(date +%Y_%m_%d)
# Ensure directory exists
mkdir -p "$DB_WORLD_UPDATE_DIR"
# List existing files for today
existing_files=($(find "$DB_WORLD_UPDATE_DIR" -maxdepth 1 -type f -name "${TODAY}_*.sql" 2>/dev/null))
# Determine the next two-digit counter (xx) for today's update files
COUNTER="00"
if [ ${#existing_files[@]} -gt 0 ]; then
max=0
for file in "${existing_files[@]}"; do
basename=$(basename "$file")
if [[ "$basename" =~ ^${TODAY}_([0-9]{2})\.sql$ ]]; then
num=${BASH_REMATCH[1]}
if [[ "$num" =~ ^[0-9]+$ ]] && (( 10#$num > max )); then
max=$((10#$num))
fi
fi
done
COUNTER=$(printf "%02d" $((max + 1)))
fi
# Compose final file path
SQL_FILENAME="${TODAY}_${COUNTER}.sql"
SQL_FILE_PATH="$DB_WORLD_UPDATE_DIR/$SQL_FILENAME"
# Write to file
{
echo "-- Auto-generated by VersionUpdater.sh on $(date)"
echo "$SQL_QUERY"
} > "$SQL_FILE_PATH"
echo "✅ SQL file created at $SQL_FILE_PATH"

View File

@@ -0,0 +1,10 @@
# The AzerothCore Version Updater for Database Squashes
> [!CAUTION]
> These steps are only for project maintainers who intend to update base files.
## Usage
1. Run `VersionUpdater.sh` from this directory.
2. The tool will update `acore.json` and create a new update SQL file in `data/sql/updates/db_world/` (see the example below).
3. Done.
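For illustration, a hypothetical run (the version number and date are made up; the messages follow the script's output format):
```bash
./VersionUpdater.sh
# ✅ Version updated to 12.0.0-dev
# ✅ SQL file created at <project root>/data/sql/updates/db_world/2025_06_29_00.sql
# That file contains: UPDATE `version` SET `db_version`='ACDB 335.12-dev', `cache_id`=12 LIMIT 1;
```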

View File

@@ -0,0 +1,11 @@
# The AzerothCore DatabaseSquash tool for Database Squashes
> [!CAUTION]
> These steps are only for project maintainers who intend to update base files.
## Usage
1. Run `DatabaseSquash.sh` from this directory (the overall flow is sketched below).
2. The tool will run `VersionUpdater.sh` and `DatabaseExporter.sh`.
3. Follow the instructions in the CLI.
4. Done.
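For orientation, the flow the tool walks you through (sub-script paths are taken from `DatabaseSquash.sh`; the middle step is performed manually by you):
```bash
./DatabaseSquash.sh
# 1. Runs apps/DatabaseSquash/VersionUpdater/versionupdater.sh (bumps the version, writes a db_world update file).
# 2. Pauses so you can drop your databases and run the worldserver to repopulate them.
# 3. Runs apps/DatabaseSquash/DatabaseExporter/databaseexporter.sh (exports the databases to data/sql/base/).
```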

View File

@@ -1,132 +0,0 @@
# Get the directory to acore.json
$scriptDirectory = $PSScriptRoot
$relativePath = "..\.."
$combinedPath = Join-Path -Path $scriptDirectory -ChildPath $relativePath
$fullPath = Resolve-Path -Path $combinedPath
$jsonFilePath = "$fullPath\acore.json"
# Get the directory for SQL update
$relativePathDbWorldUpdate = "..\..\data\sql\updates\db_world"
$combinedPathDbWorldUpdate = Join-Path -Path $scriptDirectory -ChildPath $relativePathDbWorldUpdate
$fullPathDbWorldUpdate = Resolve-Path -Path $combinedPathDbWorldUpdate
Write-Host " ___ _ _ ___ "
Write-Host "/ \ ___ ___ _ _ ___ | |_ | |_ / __| ___ _ _ ___ "
Write-Host "| - ||_ // -_)| '_|/ _ \| _|| \ | (__ / _ \| '_|/ -_)"
Write-Host "|_|_|/__|\___||_| \___/ \__||_||_| \___|\___/|_| \___|"
Write-Host "AzerothCore 3.3.5a - www.azerothcore.org"
Write-Host ""
Write-Host "Welcome to the AzerothCore Version Updater for database squashes!"
Write-Host ""
Write-Host "You have configured:"
Write-Host "acore.json Path: '$jsonFilePath'"
Write-Host "World SQL Updates path: '$fullPathDbWorldUpdate'"
Write-Host ""
Write-Host "Make sure you read the entire process before you continue."
Write-Host "https://github.com/azerothcore/azerothcore-wotlk/blob/master/data/sql/base/database-squash.md"
Write-Host "https://github.com/azerothcore/azerothcore-wotlk/blob/master/apps/VersionUpdater/versionupdater.md"
Write-Host ""
# Check if the user wants to continue using the tool
do {
$confirmation = Read-Host "Do you want to continue using the tool? (Y/N)"
if ($confirmation -eq 'Y' -or $confirmation -eq 'y') {
# Continue the script
Write-Host "AzerothCore Version Updater starts."
Write-Host ""
$continue = $true
}
elseif ($confirmation -eq 'N' -or $confirmation -eq 'n') {
# Exit the script
Write-Host "Exiting the AzerothCore Version Updater."
exit
}
else {
Write-Host "Invalid input. Please enter Y or N."
$continue = $null
}
} while ($continue -eq $null)
# Read the JSON file and convert it to a PowerShell object
$jsonContent = Get-Content -Path $jsonFilePath -Raw | ConvertFrom-Json
# Get the current version
$currentVersion = $jsonContent.version
# Match version components (major.minor.patch and optional suffix like -dev or -alpha)
if ($currentVersion -match '(\d+)\.(\d+)\.(\d+)(-.*)?') {
$major = $matches[1]
$minor = $matches[2]
$patch = $matches[3]
$suffix = $matches[4]
# Increment the major version
$major = [int]$major + 1
# Reset minor and patch version to 0 (if incrementing major)
$minor = 0
$patch = 0
# Reassemble the version with the suffix if it exists
$newVersion = "$major.$minor.$patch$suffix"
# Update the version in the JSON object
$jsonContent.version = $newVersion
} else {
Write-Host "Unknown error in $jsonFilePath. Exiting."
exit
}
# Convert the updated object back to JSON format
$newJsonContent = $jsonContent | ConvertTo-Json -Depth 3
# Write the updated content back to the file
$newJsonContent | Set-Content -Path $jsonFilePath
Write-Host "acore.json version updated to $newVersion"
# Create the SQL Version update file.
# Get today's date in the format YYYY_MM_DD
$today = Get-Date -Format "yyyy_MM_dd"
# Get the list of files in the directory that match the pattern "YYYY_MM_DD_versionNumber.sql"
$existingFiles = Get-ChildItem -Path $fullPathDbWorldUpdate -Filter "$today*_*.sql"
# If no files exist for today, start with version number 00
if ($existingFiles.Count -eq 0) {
[int]$newVersionNumber = 0
} else {
# Extract the version number from the existing files (e.g., YYYY_MM_DD_versionNumber.sql)
$maxVersionNumber = $existingFiles | ForEach-Object {
if ($_ -match "${today}_(\d{2})\.sql") {
[int]$matches[1]
}
} | Measure-Object -Maximum | Select-Object -ExpandProperty Maximum
# Increment the version number by 1
[int]$newVersionNumber = $maxVersionNumber + 1
}
# Format the new version number as a two-digit number (e.g., 01, 02, etc.)
$formattedVersionNumber = $newVersionNumber.ToString("D2")
# Define the new filename using the date and incremented version number
$newFileName = "$today" + "_$formattedVersionNumber.sql"
$newFilePath = Join-Path -Path $fullPathDbWorldUpdate -ChildPath $newFileName
# Define the SQL content to write to the file
$tableName = '`version`'
$db_version = '`db_version`'
$db_version_content = "'ACDB 335.$major-dev'"
$cache_id = '`cache_id`'
$sqlContent = "UPDATE $tableName SET $db_version=$db_version_content, $cache_id=$major LIMIT 1;"
# Write the content to the new SQL file
$sqlContent | Set-Content -Path $newFilePath
Write-Host "SQL file created: $newFilePath"
Write-Host "SQL content: $sqlContent"
Write-Host ""
Write-Host "Version Updater completed."
Write-Host "Have a nice day :)"

View File

@@ -1,53 +0,0 @@
# The AzerothCore Version Updater for Database Squashes
> [!CAUTION]
> These steps are only for project maintainers who intend to update base files.
## Description of the tool
This tool updates the version in the database and in acore.json automatically. It must therefore be run from this directory.
This is how it works step-by-step:
1. Check that all paths look correct.
2. Accept to continue using the tool.
3. The tool will update the `acore.json` file, incrementing the major version by 1 (minor and patch are reset to 0).
4. The tool will create a file with the proper UPDATE for world database in `..\..\data\sql\updates\db_world`.
## Run the tool
> [!IMPORTANT]
> This tool CANNOT be moved outside this directory. If you do, it will create files in the wrong places.
1. If you haven't run PowerShell scripts before, you'll need to adjust the execution policy.
- Open PowerShell as an Administrator.
- Run the following command to allow running scripts:
```ps
Set-ExecutionPolicy RemoteSigned -Scope CurrentUser
```
- This allows scripts to run on your system, but they need to be locally created or downloaded from trusted sources.
2. Open PowerShell (PS)
- Press Win + X and select Windows PowerShell (Admin) / Terminal (Admin)
3. Navigate to the script
- In PS, use the `cd` command to change the directory
```ps
cd "C:\AzerothCore\apps\VersionUpdater"
```
4. Run the script
- In PS, run the script
```ps
.\VersionUpdater.ps1
```
5. Follow the instructions given by the tool.
6. Now refer back to the `database-squash.md` instructions (located in `..\..\data\sql\base\`).
Completed :)

View File

@@ -3,12 +3,22 @@ import os
import sys
import re
import glob
import subprocess
base_dir = os.getcwd()
# Get the pending directory of the project
base_dir = os.getcwd()
pattern = os.path.join(base_dir, 'data/sql/updates/pending_db_*')
src_directory = glob.glob(pattern)
# Get files from base dir
base_pattern = os.path.join(base_dir, 'data/sql/base/db_*')
base_directory = glob.glob(base_pattern)
# Get files from archive dir
archive_pattern = os.path.join(base_dir, 'data/sql/archive/db_*')
archive_directory = glob.glob(archive_pattern)
# Global variables
error_handler = False
results = {
@@ -17,7 +27,8 @@ results = {
"SQL codestyle check": "Passed",
"INSERT & DELETE safety usage check": "Passed",
"Missing semicolon check": "Passed",
"Backtick check": "Passed"
"Backtick check": "Passed",
"Directory check": "Passed"
}
# Collect all files in all directories
@@ -30,6 +41,24 @@ def collect_files_from_directories(directories: list) -> list:
all_files.append(os.path.join(root, file))
return all_files
# Used to find changed or added files compared to master.
def get_changed_files() -> list:
subprocess.run(["git", "fetch", "origin", "master"], check=True)
result = subprocess.run(
["git", "diff", "--name-status", "origin/master"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
)
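# Each --name-status line looks like "M<TAB>path/to/file.sql"; only added (A) and modified (M) files are kept below.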
changed_files = []
for line in result.stdout.strip().splitlines():
if not line:
continue
status, path = line.split(maxsplit=1)
if status in ("A", "M"):
changed_files.append(path)
return changed_files
# Main function to parse all the files of the project
def parsing_file(files: list) -> None:
print("Starting AzerothCore SQL Codestyle check...")
@@ -38,19 +67,32 @@ def parsing_file(files: list) -> None:
print("https://www.azerothcore.org/wiki/sql-standards")
print(" ")
# Iterate over all files
# Iterate over all files in data/sql/updates/pending_db_*
for file_path in files:
try:
with open(file_path, 'r', encoding='utf-8') as file:
multiple_blank_lines_check(file, file_path)
trailing_whitespace_check(file, file_path)
sql_check(file, file_path)
insert_delete_safety_check(file, file_path)
semicolon_check(file, file_path)
backtick_check(file, file_path)
except UnicodeDecodeError:
print(f"\n❌ Could not decode file {file_path}")
sys.exit(1)
if "base" not in file_path and "archive" not in file_path:
try:
with open(file_path, 'r', encoding='utf-8') as file:
multiple_blank_lines_check(file, file_path)
trailing_whitespace_check(file, file_path)
sql_check(file, file_path)
insert_delete_safety_check(file, file_path)
semicolon_check(file, file_path)
backtick_check(file, file_path)
except UnicodeDecodeError:
print(f"\n❌ Could not decode file {file_path}")
sys.exit(1)
# Make sure we only check changed or added files when we work with base/archive paths
changed_files = get_changed_files()
# Iterate over all file paths
for file_path in changed_files:
if "base" in file_path or "archive" in file_path:
try:
with open(file_path, "r", encoding="utf-8") as f:
directory_check(f, file_path)
except UnicodeDecodeError:
print(f"\n❌ Could not decode file {file_path}")
sys.exit(1)
# Output the results
print("\n ")
@@ -172,11 +214,6 @@ def semicolon_check(file: io, file_path: str) -> None:
file.seek(0) # Reset file pointer to the start
check_failed = False
sql_statement_regex = re.compile(r'^\s*(SELECT|INSERT|UPDATE|DELETE|REPLACE|SET)\b', re.IGNORECASE)
block_comment_start = re.compile(r'/\*')
block_comment_end = re.compile(r'\*/')
inline_comment = re.compile(r'--.*')
query_open = False
in_block_comment = False
inside_values_block = False
@@ -323,8 +360,31 @@ def backtick_check(file: io, file_path: str) -> None:
error_handler = True
results["Backtick check"] = "Failed"
def directory_check(file: io, file_path: str) -> None:
global error_handler, results
file.seek(0)
check_failed = False
# Normalize path and split into parts
normalized_path = os.path.normpath(file_path) # handles / and \
path_parts = normalized_path.split(os.sep)
# Fail if '/base/' is part of the path
if "base" in path_parts:
print(f"{file_path} is changed/added in the base directory.\nIf this is intended, please notify a maintainer.")
check_failed = True
# Fail if '/archive/' is part of the path
if "archive" in path_parts:
print(f"{file_path} is changed/added in the archive directory.\nIf this is intended, please notify a maintainer.")
check_failed = True
if check_failed:
error_handler = True
results["Directory check"] = "Failed"
# Collect all files from matching directories
all_files = collect_files_from_directories(src_directory)
all_files = collect_files_from_directories(src_directory) + collect_files_from_directories(base_directory) + collect_files_from_directories(archive_directory)
# Main function
parsing_file(all_files)

View File

@@ -1 +0,0 @@
config.sh

View File

@@ -1,12 +0,0 @@
This script is used by devs to export the databases to the base directories.
You should use it on clean databases.
## USAGE
NOTE: this script currently only works under Unix.
1) Create a config.sh file based on /conf/config.sh.dist, adjusting the DB connection configuration.
2) Run the db_export.sh script and wait.

View File

@@ -1,52 +0,0 @@
#!/usr/bin/env bash
ROOTPATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../../" && pwd )"
source "$ROOTPATH/apps/bash_shared/includes.sh"
if [ -f "./config.sh" ]; then
source "./config.sh" # should overwrite previous
fi
echo "This is a dev-only procedure to export the DB into the SQL base files. All base files will be overwritten."
read -p "Are you sure you want to continue (y/N)? " choice
case "$choice" in
y|Y ) echo "Exporting the DB into the SQL base files...";;
* ) return;;
esac
echo "===== STARTING PROCESS ====="
function export() {
echo "Working on: "$1
database=$1
var_base_path="DB_"$database"_PATHS"
base_path=${!var_base_path%/}
base_conf="TPATH="$base_path";\
CLEANFOLDER=1; \
CHMODE=0; \
TEXTDUMPS=0; \
PARSEDUMP=1; \
FULL=0; \
DUMPOPTS='--skip-comments --skip-set-charset --routines --extended-insert --order-by-primary --single-transaction --quick'; \
"
var_base_conf="DB_"$database"_CONF"
base_conf=$base_conf${!var_base_conf}
var_base_name="DB_"$database"_NAME"
base_name=${!var_base_name}
bash "$AC_PATH_DEPS/acore/mysql-tools/mysql-tools" "dump" "" "$base_name" "" "$base_conf"
}
for db in ${DATABASES[@]}
do
export "$db"
done
echo "===== DONE ====="