Mirror of https://github.com/uprightbass360/AzerothCore-RealmMaster.git (synced 2026-01-13 00:58:34 +00:00)
refactor: reorganize scripts under bash/python
323
scripts/python/apply-config.py
Executable file
@@ -0,0 +1,323 @@
#!/usr/bin/env python3
"""
AzerothCore Configuration Manager

Reads server-overrides.conf and preset files to update actual .conf files
while preserving comments and structure.
"""

import argparse
import configparser
import os
import re
import shutil
import sys
from pathlib import Path
from typing import Dict, List, Optional, Set


class ConfigManager:
    """Manages AzerothCore configuration file updates."""

    def __init__(self, storage_path: str, overrides_file: str, dry_run: bool = False):
        self.storage_path = Path(storage_path)
        self.config_dir = self.storage_path / "config"
        self.modules_config_dir = self.storage_path / "config" / "modules"
        self.overrides_file = Path(overrides_file)
        self.dry_run = dry_run

        if not self.config_dir.exists():
            raise FileNotFoundError(f"Config directory not found: {self.config_dir}")

    def load_overrides(self) -> Dict[str, Dict[str, str]]:
        """Load configuration overrides from INI-style file."""
        if not self.overrides_file.exists():
            print(f"⚠️ Override file not found: {self.overrides_file}")
            return {}

        config = configparser.ConfigParser(interpolation=None)
        config.optionxform = str  # Preserve case sensitivity

        try:
            config.read(self.overrides_file, encoding='utf-8')
        except Exception as e:
            print(f"❌ Error reading override file: {e}")
            return {}

        overrides = {}
        for section in config.sections():
            overrides[section] = dict(config.items(section))

        return overrides

    def find_conf_file(self, filename: str) -> Optional[Path]:
        """Find a configuration file in the config directory."""
        # Check main config directory first (for core server configs)
        conf_file = self.config_dir / filename

        if conf_file.exists():
            return conf_file

        # Check modules config directory (for module configs)
        modules_conf_file = self.modules_config_dir / filename
        if modules_conf_file.exists():
            return modules_conf_file

        # Try to create from .dist file in main config directory
        dist_file = self.config_dir / f"{filename}.dist"
        if dist_file.exists():
            print(f"📄 Creating {filename} from {filename}.dist")
            if not self.dry_run:
                shutil.copy2(dist_file, conf_file)
            return conf_file

        # Try to create from .dist file in modules directory
        modules_dist_file = self.modules_config_dir / f"{filename}.dist"
        if modules_dist_file.exists():
            print(f"📄 Creating {filename} from modules/{filename}.dist")
            if not self.dry_run:
                if not self.modules_config_dir.exists():
                    self.modules_config_dir.mkdir(parents=True, exist_ok=True)
                shutil.copy2(modules_dist_file, modules_conf_file)
            return modules_conf_file

        return None

    def update_conf_file(self, conf_file: Path, settings: Dict[str, str]) -> bool:
        """Update a .conf file with new settings while preserving structure."""
        if not conf_file.exists():
            print(f"❌ Configuration file not found: {conf_file}")
            return False

        try:
            with open(conf_file, 'r', encoding='utf-8') as f:
                lines = f.readlines()
        except Exception as e:
            print(f"❌ Error reading {conf_file}: {e}")
            return False

        updated_lines = []
        updated_keys = set()

        # Process each line
        for line in lines:
            original_line = line
            stripped = line.strip()

            # Skip empty lines and comments
            if not stripped or stripped.startswith('#'):
                updated_lines.append(original_line)
                continue

            # Check if this line contains a setting we want to override
            setting_match = re.match(r'^([^=]+?)\s*=\s*(.*)$', stripped)
            if setting_match:
                key = setting_match.group(1).strip()

                if key in settings:
                    # Replace with our override value
                    new_value = settings[key]
                    # Preserve the original indentation
                    indent = len(line) - len(line.lstrip())
                    new_line = ' ' * indent + f"{key} = {new_value}\n"
                    updated_lines.append(new_line)
                    updated_keys.add(key)
                    print(f" ✅ {key} = {new_value}")
                else:
                    # Keep original line
                    updated_lines.append(original_line)
            else:
                # Keep original line (could be section header or other content)
                updated_lines.append(original_line)

        # Add any settings that weren't found in the file
        for key, value in settings.items():
            if key not in updated_keys:
                updated_lines.append(f"{key} = {value}\n")
                print(f" ➕ {key} = {value} (added)")

        # Write the updated file
        if not self.dry_run:
            try:
                with open(conf_file, 'w', encoding='utf-8') as f:
                    f.writelines(updated_lines)
            except Exception as e:
                print(f"❌ Error writing {conf_file}: {e}")
                return False

        return True

    def apply_overrides(self, overrides: Dict[str, Dict[str, str]],
                        filter_files: Optional[Set[str]] = None) -> bool:
        """Apply all configuration overrides."""
        success = True

        if not overrides:
            print("ℹ️ No configuration overrides to apply")
            return True

        print(f"🔧 Applying configuration overrides{' (DRY RUN)' if self.dry_run else ''}...")

        for conf_filename, settings in overrides.items():
            # Skip if we're filtering and this file isn't in the filter
            if filter_files and conf_filename not in filter_files:
                continue

            if not settings:
                continue

            print(f"\n📝 Updating {conf_filename}:")

            # Find the configuration file
            conf_file = self.find_conf_file(conf_filename)
            if not conf_file:
                print(f" ⚠️ Configuration file not found: {conf_filename}")
                success = False
                continue

            # Update the file
            if not self.update_conf_file(conf_file, settings):
                success = False

        return success


def load_preset(preset_file: Path) -> Dict[str, Dict[str, str]]:
    """Load a preset configuration file."""
    if not preset_file.exists():
        raise FileNotFoundError(f"Preset file not found: {preset_file}")

    config = configparser.ConfigParser(interpolation=None)
    config.optionxform = str  # Preserve case sensitivity
    config.read(preset_file, encoding='utf-8')

    overrides = {}
    for section in config.sections():
        overrides[section] = dict(config.items(section))

    return overrides


def list_available_presets(preset_dir: Path) -> List[str]:
    """List available preset files."""
    if not preset_dir.exists():
        return []

    presets = []
    for preset_file in preset_dir.glob("*.conf"):
        presets.append(preset_file.stem)

    return sorted(presets)


def main():
    parser = argparse.ArgumentParser(
        description="Apply AzerothCore configuration overrides and presets"
    )
    parser.add_argument(
        "--storage-path",
        default="./storage",
        help="Path to storage directory (default: ./storage)"
    )
    parser.add_argument(
        "--overrides-file",
        default="./config/server-overrides.conf",
        help="Path to server overrides file (default: ./config/server-overrides.conf)"
    )
    parser.add_argument(
        "--preset",
        help="Apply a preset from config/presets/<name>.conf"
    )
    parser.add_argument(
        "--list-presets",
        action="store_true",
        help="List available presets"
    )
    parser.add_argument(
        "--files",
        help="Comma-separated list of .conf files to update (default: all)"
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Show what would be changed without making modifications"
    )

    args = parser.parse_args()

    # Handle list presets
    if args.list_presets:
        preset_dir = Path("./config/presets")
        presets = list_available_presets(preset_dir)

        if presets:
            print("📋 Available presets:")
            for preset in presets:
                preset_file = preset_dir / f"{preset}.conf"
                print(f" • {preset}")
                # Try to read description from preset file
                if preset_file.exists():
                    try:
                        with open(preset_file, 'r') as f:
                            first_line = f.readline().strip()
                            if first_line.startswith('#') and len(first_line) > 1:
                                description = first_line[1:].strip()
                                print(f"   {description}")
                    except:
                        pass
        else:
            print("ℹ️ No presets found in config/presets/")
        return

    try:
        # Initialize configuration manager
        config_manager = ConfigManager(
            storage_path=args.storage_path,
            overrides_file=args.overrides_file,
            dry_run=args.dry_run
        )

        # Determine which files to filter (if any)
        filter_files = None
        if args.files:
            filter_files = set(f.strip() for f in args.files.split(','))

        # Load configuration overrides
        overrides = {}

        # Load preset if specified
        if args.preset:
            preset_file = Path(f"./config/presets/{args.preset}.conf")
            print(f"📦 Loading preset: {args.preset}")
            try:
                preset_overrides = load_preset(preset_file)
                overrides.update(preset_overrides)
            except FileNotFoundError as e:
                print(f"❌ {e}")
                return 1

        # Load server overrides (this can override preset values)
        server_overrides = config_manager.load_overrides()
        overrides.update(server_overrides)

        # Apply all overrides
        success = config_manager.apply_overrides(overrides, filter_files)

        if success:
            if args.dry_run:
                print("\n✅ Configuration validation complete")
            else:
                print("\n✅ Configuration applied successfully")
                print("ℹ️ Restart your server to apply changes")
            return 0
        else:
            print("\n❌ Some configuration updates failed")
            return 1

    except Exception as e:
        print(f"❌ Error: {e}")
        return 1


if __name__ == "__main__":
    sys.exit(main())
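For reference, a minimal sketch of driving the ConfigManager above from Python instead of the CLI. It assumes the script's own defaults (./storage and ./config/server-overrides.conf) already exist; importlib is used only because the file name contains a hyphen, and dry_run=True keeps the .conf files untouched.

# Illustrative only; assumes the repository defaults above are present.
import importlib.util

spec = importlib.util.spec_from_file_location(
    "apply_config", "scripts/python/apply-config.py"
)
apply_config = importlib.util.module_from_spec(spec)
spec.loader.exec_module(apply_config)

manager = apply_config.ConfigManager(
    storage_path="./storage",
    overrides_file="./config/server-overrides.conf",
    dry_run=True,  # report changes without writing any .conf file
)
overrides = manager.load_overrides()
manager.apply_overrides(overrides)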
76
scripts/python/check_module_staging.py
Executable file
@@ -0,0 +1,76 @@
#!/usr/bin/env python3
import json
import os
import subprocess
import sys
from pathlib import Path


def load_module_state(root: Path) -> dict:
    env_path = root / ".env"
    manifest_path = root / "config" / "module-manifest.json"
    # modules.py lives alongside this script under scripts/python/
    modules_py = root / "scripts" / "python" / "modules.py"

    try:
        output = subprocess.check_output(
            [
                sys.executable,
                str(modules_py),
                "--env-path",
                str(env_path),
                "--manifest",
                str(manifest_path),
                "dump",
                "--format",
                "json",
            ],
            text=True,
        )
    except subprocess.CalledProcessError as exc:
        print("Unable to load module state:", exc, file=sys.stderr)
        sys.exit(2)

    return json.loads(output)


def main() -> int:
    # This file sits in scripts/python/, so the repository root is two levels up
    root = Path(__file__).resolve().parents[2]
    data = load_module_state(root)

    enabled_modules = [m for m in data["modules"] if m["enabled"]]
    storage_dir = root / "storage" / "modules"

    local_root = Path(os.environ.get("STORAGE_PATH_LOCAL", "./local-storage"))
    local_root = (root / local_root).resolve()
    requires_playerbots = any(m["key"] == "MODULE_PLAYERBOTS" and m["enabled"] for m in enabled_modules)
    source_dir = local_root / "source"
    source_dir = source_dir / ("azerothcore-playerbots" if requires_playerbots else "azerothcore") / "modules"

    print(f"📦 Checking module staging in {storage_dir} and {source_dir}")
    print("Enabled modules:", ", ".join(m["name"] for m in enabled_modules))

    status = 0
    for module in enabled_modules:
        dir_name = module["name"]
        storage_path = storage_dir / dir_name
        source_path = source_dir / dir_name

        def state(path: Path) -> str:
            if (path / ".git").is_dir():
                return "git"
            if path.is_dir():
                return "present"
            return "missing"

        storage_state = state(storage_path)
        source_state = state(source_path)
        print(f" - {dir_name} ({module['key']}): storage={storage_state}, source={source_state}")

        if storage_state == "missing" or source_state == "missing":
            status = 1

    return status


if __name__ == "__main__":
    sys.exit(main())
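For reference, a minimal sketch of the shape of the `modules.py dump --format json` payload this check consumes (the "modules" list with "key", "name", and "enabled" fields). The module keys and names shown are made-up examples, not entries from the real manifest.

# Illustrative only; fabricated sample payload.
sample_state = {
    "modules": [
        {"key": "MODULE_PLAYERBOTS", "name": "mod-playerbots", "enabled": True},
        {"key": "MODULE_EXAMPLE", "name": "mod-example", "enabled": False},
    ]
}
enabled = [m["name"] for m in sample_state["modules"] if m["enabled"]]
print(enabled)  # ['mod-playerbots']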
546
scripts/python/modules.py
Executable file
@@ -0,0 +1,546 @@
#!/usr/bin/env python3
"""
Module manifest helper.

Reads config/module-manifest.json and .env to produce canonical module state that
downstream shell scripts can consume for staging, rebuild detection, and
dependency validation.
"""

from __future__ import annotations

import argparse
import json
import os
import sys
import textwrap
from dataclasses import dataclass, asdict, field
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, Iterable, List, Optional, Tuple
import shlex


STRICT_TRUE = {"1", "true", "yes", "on"}


def parse_bool(value: str) -> bool:
    if value is None:
        return False
    return str(value).strip().lower() in STRICT_TRUE


def load_env_file(env_path: Path) -> Dict[str, str]:
    if not env_path.exists():
        return {}
    env: Dict[str, str] = {}
    for raw_line in env_path.read_text(encoding="utf-8").splitlines():
        line = raw_line.strip()
        if not line or line.startswith("#"):
            continue
        if line.startswith("export "):
            line = line[len("export ") :].strip()
        if "=" not in line:
            continue
        key, value = line.split("=", 1)
        key = key.strip()
        value = value.strip()
        if value.startswith('"') and value.endswith('"'):
            value = value[1:-1]
        elif value.startswith("'") and value.endswith("'"):
            value = value[1:-1]
        env[key] = value
    return env


def load_manifest(manifest_path: Path) -> List[Dict[str, object]]:
    if not manifest_path.exists():
        raise FileNotFoundError(f"Manifest file not found: {manifest_path}")
    with manifest_path.open("r", encoding="utf-8") as fh:
        manifest = json.load(fh)
    modules = manifest.get("modules")
    if not isinstance(modules, list):
        raise ValueError("Manifest must define a top-level 'modules' array")
    validated: List[Dict[str, object]] = []
    seen_keys: set[str] = set()
    for entry in modules:
        if not isinstance(entry, dict):
            raise ValueError("Each manifest entry must be an object")
        key = entry.get("key")
        name = entry.get("name")
        repo = entry.get("repo")
        if not key or not isinstance(key, str):
            raise ValueError("Manifest entry missing 'key'")
        if key in seen_keys:
            raise ValueError(f"Duplicate manifest key detected: {key}")
        seen_keys.add(key)
        if not name or not isinstance(name, str):
            raise ValueError(f"Manifest entry {key} missing 'name'")
        if not repo or not isinstance(repo, str):
            raise ValueError(f"Manifest entry {key} missing 'repo'")
        validated.append(entry)
    return validated


@dataclass
class ModuleState:
    key: str
    name: str
    repo: str
    needs_build: bool
    module_type: str
    requires: List[str] = field(default_factory=list)
    ref: Optional[str] = None
    status: str = "active"
    block_reason: Optional[str] = None
    post_install_hooks: List[str] = field(default_factory=list)
    config_cleanup: List[str] = field(default_factory=list)
    sql: Optional[object] = None
    notes: Optional[str] = None
    enabled_raw: bool = False
    enabled_effective: bool = False
    value: str = "0"
    dependency_issues: List[str] = field(default_factory=list)
    warnings: List[str] = field(default_factory=list)
    errors: List[str] = field(default_factory=list)

    @property
    def blocked(self) -> bool:
        return self.status.lower() == "blocked"


@dataclass
class ModuleCollectionState:
    manifest_path: Path
    env_path: Path
    modules: List[ModuleState]
    generated_at: datetime
    warnings: List[str]
    errors: List[str]

    def enabled_modules(self) -> List[ModuleState]:
        return [module for module in self.modules if module.enabled_effective]

    def compile_modules(self) -> List[ModuleState]:
        return [
            module
            for module in self.modules
            if module.enabled_effective and module.needs_build
        ]

    def requires_playerbot_source(self) -> bool:
        module_map = {m.key: m for m in self.modules}
        playerbots_enabled = module_map.get("MODULE_PLAYERBOTS")
        return bool(playerbots_enabled and playerbots_enabled.enabled_effective)

    def requires_custom_build(self) -> bool:
        return any(module.needs_build and module.enabled_effective for module in self.modules)


def build_state(env_path: Path, manifest_path: Path) -> ModuleCollectionState:
    env_map = load_env_file(env_path)
    manifest_entries = load_manifest(manifest_path)
    modules: List[ModuleState] = []
    errors: List[str] = []
    warnings: List[str] = []

    # Track which manifest keys appear in .env for coverage validation
    env_keys_in_manifest: set[str] = set()

    for entry in manifest_entries:
        key = entry["key"]
        name = entry["name"]
        repo = entry["repo"]
        module_type = str(entry.get("type", "cpp"))
        needs_build_flag = entry.get("needs_build")
        if needs_build_flag is None:
            needs_build = module_type.lower() == "cpp"
        else:
            needs_build = bool(needs_build_flag)
        requires = entry.get("requires") or []
        if not isinstance(requires, list):
            raise ValueError(f"Manifest entry {key} has non-list 'requires'")
        requires = [str(dep) for dep in requires]

        status = entry.get("status", "active")
        block_reason = entry.get("block_reason")
        post_install_hooks = entry.get("post_install_hooks") or []
        if not isinstance(post_install_hooks, list):
            raise ValueError(f"Manifest entry {key} has non-list 'post_install_hooks'")
        post_install_hooks = [str(hook) for hook in post_install_hooks]
        config_cleanup = entry.get("config_cleanup") or []
        if not isinstance(config_cleanup, list):
            raise ValueError(f"Manifest entry {key} has non-list 'config_cleanup'")
        config_cleanup = [str(pattern) for pattern in config_cleanup]
        sql = entry.get("sql")
        ref = entry.get("ref")
        notes = entry.get("notes")

        raw_value = env_map.get(key, os.environ.get(key, "0"))
        env_keys_in_manifest.add(key)
        enabled_raw = parse_bool(raw_value)

        module = ModuleState(
            key=key,
            name=name,
            repo=repo,
            needs_build=needs_build,
            module_type=module_type,
            requires=requires,
            ref=ref,
            status=status,
            block_reason=block_reason,
            post_install_hooks=post_install_hooks,
            config_cleanup=config_cleanup,
            sql=sql,
            notes=notes,
            enabled_raw=enabled_raw,
        )

        if module.blocked and enabled_raw:
            module.errors.append(
                f"{module.key} is blocked: {module.block_reason or 'blocked in manifest'}"
            )

        # Effective enablement respects block status
        module.enabled_effective = enabled_raw and not module.blocked
        module.value = "1" if module.enabled_effective else "0"

        modules.append(module)

    module_map: Dict[str, ModuleState] = {module.key: module for module in modules}

    # Dependency validation
    for module in modules:
        if not module.enabled_effective:
            continue
        missing: List[str] = []
        for dependency in module.requires:
            dep_state = module_map.get(dependency)
            if not dep_state or not dep_state.enabled_effective:
                missing.append(dependency)
        if missing:
            plural = "modules" if len(missing) > 1 else "module"
            list_str = ", ".join(missing)
            message = f"{module.key} requires {plural}: {list_str}"
            module.errors.append(message)

    # Collect warnings/errors
    for module in modules:
        if module.errors:
            errors.extend(module.errors)
        if module.warnings:
            warnings.extend(module.warnings)

    # Warn if .env defines modules not in manifest
    extra_env_modules = [
        key for key in env_map.keys() if key.startswith("MODULE_") and key not in module_map
    ]
    for unknown_key in extra_env_modules:
        warnings.append(f".env defines {unknown_key} but it is missing from the manifest")

    # Warn if manifest entry lacks .env toggle
    for module in modules:
        if module.key not in env_map and module.key not in os.environ:
            warnings.append(
                f"Manifest includes {module.key} but .env does not define it (defaulting to 0)"
            )

    return ModuleCollectionState(
        manifest_path=manifest_path,
        env_path=env_path,
        modules=modules,
        generated_at=datetime.now(timezone.utc),
        warnings=warnings,
        errors=errors,
    )


def write_outputs(state: ModuleCollectionState, output_dir: Path) -> None:
    output_dir.mkdir(parents=True, exist_ok=True)

    env_lines: List[str] = [
        "# Autogenerated by scripts/python/modules.py",
        f"# Generated at {state.generated_at.isoformat()}",
        f'export MODULES_MANIFEST="{state.manifest_path}"',
        f'export MODULES_ENV_PATH="{state.env_path}"',
    ]

    enabled_names: List[str] = []
    compile_names: List[str] = []
    enabled_keys: List[str] = []
    compile_keys: List[str] = []

    for module in state.modules:
        env_lines.append(f"export {module.key}={module.value}")
        if module.enabled_effective:
            enabled_names.append(module.name)
            enabled_keys.append(module.key)
        if module.enabled_effective and module.needs_build:
            compile_names.append(module.name)
            compile_keys.append(module.key)

    env_lines.append(f'export MODULES_ENABLED="{ " ".join(enabled_names) }"'.rstrip())
    env_lines.append(f'export MODULES_COMPILE="{ " ".join(compile_names) }"'.rstrip())
    env_lines.append(f'export MODULES_ENABLED_LIST="{",".join(enabled_keys)}"')
    env_lines.append(f'export MODULES_CPP_LIST="{",".join(compile_keys)}"')
    env_lines.append(
        f"export MODULES_REQUIRES_PLAYERBOT_SOURCE="
        f'{"1" if state.requires_playerbot_source() else "0"}'
    )
    env_lines.append(
        f"export MODULES_REQUIRES_CUSTOM_BUILD="
        f'{"1" if state.requires_custom_build() else "0"}'
    )
    env_lines.append(f"export MODULES_WARNING_COUNT={len(state.warnings)}")
    env_lines.append(f"export MODULES_ERROR_COUNT={len(state.errors)}")

    modules_env_path = output_dir / "modules.env"
    modules_env_path.write_text("\n".join(env_lines) + "\n", encoding="utf-8")

    state_payload = {
        "generated_at": state.generated_at.isoformat(),
        "manifest_path": str(state.manifest_path),
        "env_path": str(state.env_path),
        "warnings": state.warnings,
        "errors": state.errors,
        "modules": [
            {
                **asdict(module),
                "enabled_raw": module.enabled_raw,
                "enabled_effective": module.enabled_effective,
                "blocked": module.blocked,
            }
            for module in state.modules
        ],
        "enabled_modules": [module.name for module in state.enabled_modules()],
        "compile_modules": [module.name for module in state.compile_modules()],
        "requires_playerbot_source": state.requires_playerbot_source(),
        "requires_custom_build": state.requires_custom_build(),
    }

    modules_state_path = output_dir / "modules-state.json"
    modules_state_path.write_text(
        json.dumps(state_payload, indent=2, sort_keys=True) + "\n",
        encoding="utf-8",
    )

    meta_dir = output_dir / ".modules-meta"
    meta_dir.mkdir(parents=True, exist_ok=True)

    compile_list_path = meta_dir / "modules-compile.txt"
    compile_list_path.write_text(
        "\n".join(state_payload["compile_modules"]) + ("\n" if compile_names else ""),
        encoding="utf-8",
    )

    enabled_list_path = meta_dir / "modules-enabled.txt"
    enabled_list_path.write_text(
        "\n".join(state_payload["enabled_modules"]) + ("\n" if enabled_names else ""),
        encoding="utf-8",
    )


def print_list(state: ModuleCollectionState, selector: str) -> None:
    if selector == "compile":
        items = [module.name for module in state.compile_modules()]
    elif selector == "enabled":
        items = [module.name for module in state.enabled_modules()]
    elif selector == "keys":
        items = [module.key for module in state.enabled_modules()]
    else:
        raise ValueError(f"Unknown list selector: {selector}")
    for item in items:
        print(item)


def print_requires_playerbot(state: ModuleCollectionState) -> None:
    print("1" if state.requires_playerbot_source() else "0")


def print_requires_custom_build(state: ModuleCollectionState) -> None:
    print("1" if state.requires_custom_build() else "0")


def print_state(state: ModuleCollectionState, fmt: str) -> None:
    payload = {
        "generated_at": state.generated_at.isoformat(),
        "warnings": state.warnings,
        "errors": state.errors,
        "modules": [
            {
                "key": module.key,
                "name": module.name,
                "enabled": module.enabled_effective,
                "needs_build": module.needs_build,
                "requires": module.requires,
                "blocked": module.blocked,
                "dependency_issues": module.dependency_issues,
                "post_install_hooks": module.post_install_hooks,
                "config_cleanup": module.config_cleanup,
            }
            for module in state.modules
        ],
        "enabled_modules": [module.name for module in state.enabled_modules()],
        "compile_modules": [module.name for module in state.compile_modules()],
        "requires_playerbot_source": state.requires_playerbot_source(),
    }
    if fmt == "json":
        json.dump(payload, sys.stdout, indent=2, sort_keys=True)
        sys.stdout.write("\n")
    elif fmt == "shell":
        keys = [module.key for module in state.modules]
        quoted_keys = " ".join(shlex.quote(key) for key in keys)
        print(f"MODULE_KEYS=({quoted_keys})")
        print(
            "declare -A MODULE_NAME MODULE_REPO MODULE_REF MODULE_TYPE MODULE_ENABLED "
            "MODULE_NEEDS_BUILD MODULE_BLOCKED MODULE_POST_INSTALL MODULE_REQUIRES "
            "MODULE_CONFIG_CLEANUP "
            "MODULE_NOTES MODULE_STATUS MODULE_BLOCK_REASON"
        )
        for module in state.modules:
            key = module.key
            post_install = ",".join(module.post_install_hooks)
            dependencies = ",".join(module.requires)
            block_reason = module.block_reason or ""
            ref = module.ref or ""
            notes = module.notes or ""
            config_cleanup = ",".join(module.config_cleanup)
            print(f"MODULE_NAME[{key}]={shlex.quote(module.name)}")
            print(f"MODULE_REPO[{key}]={shlex.quote(module.repo)}")
            print(f"MODULE_REF[{key}]={shlex.quote(ref)}")
            print(f"MODULE_TYPE[{key}]={shlex.quote(module.module_type)}")
            print(f"MODULE_ENABLED[{key}]={1 if module.enabled_effective else 0}")
            print(f"MODULE_NEEDS_BUILD[{key}]={1 if module.needs_build else 0}")
            print(f"MODULE_BLOCKED[{key}]={1 if module.blocked else 0}")
            print(f"MODULE_POST_INSTALL[{key}]={shlex.quote(post_install)}")
            print(f"MODULE_REQUIRES[{key}]={shlex.quote(dependencies)}")
            print(f"MODULE_CONFIG_CLEANUP[{key}]={shlex.quote(config_cleanup)}")
            print(f"MODULE_NOTES[{key}]={shlex.quote(notes)}")
            print(f"MODULE_STATUS[{key}]={shlex.quote(module.status)}")
            print(f"MODULE_BLOCK_REASON[{key}]={shlex.quote(block_reason)}")
    else:
        raise ValueError(f"Unsupported format: {fmt}")


def handle_generate(args: argparse.Namespace) -> int:
    env_path = Path(args.env_path).resolve()
    manifest_path = Path(args.manifest).resolve()
    output_dir = Path(args.output_dir).resolve()
    state = build_state(env_path, manifest_path)
    write_outputs(state, output_dir)

    if state.warnings:
        warning_block = "\n".join(f"- {warning}" for warning in state.warnings)
        print(
            textwrap.dedent(
                f"""\
                ⚠️ Module manifest warnings detected:
                {warning_block}
                """
            ),
            file=sys.stderr,
        )
    if state.errors:
        error_block = "\n".join(f"- {error}" for error in state.errors)
        print(
            textwrap.dedent(
                f"""\
                ❌ Module manifest errors detected:
                {error_block}
                """
            ),
            file=sys.stderr,
        )
        return 1
    return 0


def configure_parser() -> argparse.ArgumentParser:
    parser = argparse.ArgumentParser(description="Module manifest helper")
    parser.add_argument(
        "--env-path",
        default=".env",
        help="Path to .env file (default: .env)",
    )
    parser.add_argument(
        "--manifest",
        default="config/module-manifest.json",
        help="Path to module manifest (default: config/module-manifest.json)",
    )

    subparsers = parser.add_subparsers(dest="command", required=True)

    generate_parser = subparsers.add_parser("generate", help="Generate module state files")
    generate_parser.add_argument(
        "--output-dir",
        default="local-storage/modules",
        help="Directory for generated module artifacts (default: local-storage/modules)",
    )
    generate_parser.set_defaults(func=handle_generate)

    list_parser = subparsers.add_parser("list", help="Print module lists")
    list_parser.add_argument(
        "--type",
        choices=["compile", "enabled", "keys"],
        default="compile",
        help="List selector (default: compile)",
    )

    def handle_list(args: argparse.Namespace) -> int:
        state = build_state(Path(args.env_path).resolve(), Path(args.manifest).resolve())
        print_list(state, args.type)
        return 1 if state.errors else 0

    list_parser.set_defaults(func=handle_list)

    rps_parser = subparsers.add_parser(
        "requires-playerbot", help="Print 1 if playerbot source is required else 0"
    )

    def handle_requires_playerbot(args: argparse.Namespace) -> int:
        state = build_state(Path(args.env_path).resolve(), Path(args.manifest).resolve())
        print_requires_playerbot(state)
        return 1 if state.errors else 0

    rps_parser.set_defaults(func=handle_requires_playerbot)

    rcb_parser = subparsers.add_parser(
        "requires-custom-build",
        help="Print 1 if a custom source build is required else 0",
    )

    def handle_requires_custom_build(args: argparse.Namespace) -> int:
        state = build_state(Path(args.env_path).resolve(), Path(args.manifest).resolve())
        print_requires_custom_build(state)
        return 1 if state.errors else 0

    rcb_parser.set_defaults(func=handle_requires_custom_build)

    dump_parser = subparsers.add_parser("dump", help="Dump module state (JSON format)")
    dump_parser.add_argument(
        "--format",
        choices=["json", "shell"],
        default="json",
        help="Output format (default: json)",
    )

    def handle_dump(args: argparse.Namespace) -> int:
        state = build_state(Path(args.env_path).resolve(), Path(args.manifest).resolve())
        print_state(state, args.format)
        return 1 if state.errors else 0

    dump_parser.set_defaults(func=handle_dump)

    return parser


def main(argv: Optional[Iterable[str]] = None) -> int:
    parser = configure_parser()
    args = parser.parse_args(argv)
    return args.func(args)


if __name__ == "__main__":
    sys.exit(main())
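For reference, a minimal sketch of using the helper above as a library rather than via its subcommands. It assumes it is run from the repository root with .env and config/module-manifest.json present; the sys.path insertion is only needed because the module lives under scripts/python/.

# Illustrative only; assumes .env and config/module-manifest.json exist.
import sys
from pathlib import Path

sys.path.insert(0, str(Path("scripts/python")))
from modules import build_state, write_outputs  # noqa: E402

state = build_state(Path(".env"), Path("config/module-manifest.json"))
print("enabled:", [m.name for m in state.enabled_modules()])
print("needs custom build:", state.requires_custom_build())
write_outputs(state, Path("local-storage/modules"))  # writes modules.env and modules-state.json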
92
scripts/python/parse-config-presets.py
Executable file
@@ -0,0 +1,92 @@
#!/usr/bin/env python3
"""
Parse configuration preset metadata for setup.sh
"""

import sys
import argparse
from pathlib import Path


def parse_preset_metadata(preset_file: Path):
    """Parse CONFIG_NAME and CONFIG_DESCRIPTION from a preset file."""
    if not preset_file.exists():
        return None, None

    config_name = None
    config_description = None

    try:
        with open(preset_file, 'r', encoding='utf-8') as f:
            for line in f:
                line = line.strip()
                if line.startswith('# CONFIG_NAME:'):
                    config_name = line[14:].strip()
                elif line.startswith('# CONFIG_DESCRIPTION:'):
                    config_description = line[21:].strip()
                elif not line.startswith('#'):
                    # Stop at first non-comment line
                    break
    except Exception:
        return None, None

    return config_name, config_description


def list_presets(presets_dir: Path):
    """List all available presets with their metadata."""
    if not presets_dir.exists():
        return

    presets = []
    for preset_file in presets_dir.glob("*.conf"):
        preset_key = preset_file.stem
        config_name, config_description = parse_preset_metadata(preset_file)

        if config_name is None:
            config_name = preset_key.replace('-', ' ').title()
        if config_description is None:
            config_description = f"Configuration preset: {preset_key}"

        presets.append((preset_key, config_name, config_description))

    # Sort presets, but ensure 'none' comes first
    presets.sort(key=lambda x: (0 if x[0] == 'none' else 1, x[0]))

    for preset_key, config_name, config_description in presets:
        print(f"{preset_key}\t{config_name}\t{config_description}")


def get_preset_info(presets_dir: Path, preset_key: str):
    """Get information for a specific preset."""
    preset_file = presets_dir / f"{preset_key}.conf"
    config_name, config_description = parse_preset_metadata(preset_file)

    if config_name is None:
        config_name = preset_key.replace('-', ' ').title()
    if config_description is None:
        config_description = f"Configuration preset: {preset_key}"

    print(f"{config_name}\t{config_description}")


def main():
    parser = argparse.ArgumentParser(description="Parse configuration preset metadata")
    parser.add_argument("command", choices=["list", "info"], help="Command to execute")
    parser.add_argument("--presets-dir", default="./config/presets", help="Presets directory")
    parser.add_argument("--preset", help="Preset name for 'info' command")

    args = parser.parse_args()
    presets_dir = Path(args.presets_dir)

    if args.command == "list":
        list_presets(presets_dir)
    elif args.command == "info":
        if not args.preset:
            print("Error: --preset required for 'info' command", file=sys.stderr)
            sys.exit(1)
        get_preset_info(presets_dir, args.preset)


if __name__ == "__main__":
    main()
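For reference, a minimal sketch of the '# CONFIG_NAME:' / '# CONFIG_DESCRIPTION:' header convention the parser above expects. The preset name, values, and temporary path are made-up examples; importlib is used only because the file name contains a hyphen.

# Illustrative only; fabricated preset content.
from pathlib import Path
import importlib.util

spec = importlib.util.spec_from_file_location(
    "parse_config_presets", "scripts/python/parse-config-presets.py"
)
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)

preset = Path("/tmp/solo-friendly.conf")
preset.write_text(
    "# CONFIG_NAME: Solo Friendly\n"
    "# CONFIG_DESCRIPTION: Relaxed rates for solo play\n"
    "[worldserver.conf]\n"
    "Rate.XP.Kill = 3\n"
)
print(mod.parse_preset_metadata(preset))
# -> ('Solo Friendly', 'Relaxed rates for solo play')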
139
scripts/python/setup_manifest.py
Executable file
@@ -0,0 +1,139 @@
#!/usr/bin/env python3
"""
Utility commands for setup.sh to read module manifest metadata.
"""

import json
import sys
from pathlib import Path
from typing import Iterable, List


def load_manifest(path: str) -> dict:
    manifest_path = Path(path)
    if not manifest_path.is_file():
        print(f"ERROR: Module manifest not found at {manifest_path}", file=sys.stderr)
        sys.exit(1)
    try:
        return json.loads(manifest_path.read_text())
    except json.JSONDecodeError as exc:
        print(f"ERROR: Failed to parse manifest {manifest_path}: {exc}", file=sys.stderr)
        sys.exit(1)


def iter_modules(manifest: dict) -> Iterable[dict]:
    modules = manifest.get("modules") or []
    for entry in modules:
        if isinstance(entry, dict) and entry.get("key"):
            yield entry


def unique_preserve_order(values: Iterable[str]) -> List[str]:
    seen = set()
    ordered: List[str] = []
    for value in values:
        if not value:
            continue
        if value not in seen:
            seen.add(value)
            ordered.append(value)
    return ordered


def clean(value: str) -> str:
    if value is None:
        return "-"
    text = str(value).replace("\t", " ").replace("\n", " ").strip()
    return text if text else "-"


def cmd_keys(manifest_path: str) -> None:
    manifest = load_manifest(manifest_path)
    for entry in iter_modules(manifest):
        print(entry["key"])


def cmd_metadata(manifest_path: str) -> None:
    manifest = load_manifest(manifest_path)
    for entry in iter_modules(manifest):
        key = entry["key"]
        name = clean(entry.get("name", key))
        module_type_raw = entry.get("type", "")
        module_type = clean(module_type_raw)
        needs_build_flag = entry.get("needs_build")
        if needs_build_flag is None:
            needs_build = "1" if str(module_type_raw).lower() == "cpp" else "0"
        else:
            needs_build = "1" if needs_build_flag else "0"
        status = clean(entry.get("status", "active"))
        block_reason = clean(entry.get("block_reason", ""))
        requires = unique_preserve_order(entry.get("requires") or [])
        requires_csv = ",".join(requires) if requires else "-"
        notes = clean(entry.get("notes", ""))
        description = clean(entry.get("description", ""))
        category = clean(entry.get("category", ""))
        special_message = clean(entry.get("special_message", ""))
        repo = clean(entry.get("repo", ""))
        print(
            "\t".join(
                [
                    key,
                    name,
                    needs_build,
                    module_type if module_type != "" else "-",
                    status,
                    block_reason,
                    requires_csv,
                    notes,
                    description,
                    category,
                    special_message,
                    repo,
                ]
            )
        )


def cmd_sorted_keys(manifest_path: str) -> None:
    manifest = load_manifest(manifest_path)
    modules = list(iter_modules(manifest))
    modules.sort(
        key=lambda item: (
            # Primary sort by order (default to 5000 if not specified)
            item.get("order", 5000),
            # Secondary sort by type
            str(item.get("type", "")),
            # Tertiary sort by name (case insensitive)
            str(item.get("name", item.get("key", ""))).lower(),
        )
    )
    for entry in modules:
        print(entry["key"])


COMMAND_MAP = {
    "keys": cmd_keys,
    "metadata": cmd_metadata,
    "sorted-keys": cmd_sorted_keys,
}


def main(argv: List[str]) -> int:
    if len(argv) != 3:
        print(f"Usage: {argv[0]} <command> <manifest-path>", file=sys.stderr)
        return 1

    command = argv[1]
    manifest_path = argv[2]
    handler = COMMAND_MAP.get(command)
    if handler is None:
        valid = ", ".join(sorted(COMMAND_MAP))
        print(f"Unknown command '{command}'. Valid commands: {valid}", file=sys.stderr)
        return 1

    handler(manifest_path)
    return 0


if __name__ == "__main__":
    sys.exit(main(sys.argv))
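For reference, a minimal sketch of the manifest fields cmd_metadata reads, shown as an in-memory dict. The module entry and repository URL are made-up placeholders, not entries from the real manifest.

# Illustrative only; fabricated manifest entry.
import sys
from pathlib import Path

sys.path.insert(0, str(Path("scripts/python")))
from setup_manifest import iter_modules  # noqa: E402

manifest = {
    "modules": [
        {
            "key": "MODULE_EXAMPLE",
            "name": "mod-example",
            "repo": "https://example.org/mod-example.git",
            "type": "cpp",  # with no needs_build flag, cpp entries default to needing a build
            "status": "active",
            "requires": [],
        }
    ]
}
for entry in iter_modules(manifest):
    print(entry["key"], entry["name"])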
109
scripts/python/setup_profiles.py
Executable file
@@ -0,0 +1,109 @@
#!/usr/bin/env python3
"""
Expose profile metadata for setup.sh.

Profiles are JSON documents with at least:
{
  "modules": ["MODULE_FOO", "MODULE_BAR"],
  "label": "...",        # optional
  "description": "..."   # optional
}
"""

from __future__ import annotations

import json
import sys
from pathlib import Path
from typing import Iterable, List, Tuple


def normalize_modules(raw_modules: Iterable[str], profile: Path) -> List[str]:
    """Return a cleaned list of module identifiers."""
    modules: List[str] = []
    for item in raw_modules:
        if not isinstance(item, str):
            raise ValueError(f"Profile {profile.name}: module entries must be strings")
        value = item.strip()
        if not value:
            continue
        modules.append(value)
    if not modules:
        raise ValueError(f"Profile {profile.name}: modules list cannot be empty")
    return modules


def sanitize(text: str | None) -> str:
    if not text:
        return ""
    return str(text).replace("\t", " ").replace("\n", " ").strip()


def load_profile(path: Path) -> Tuple[str, List[str], str, str, int]:
    try:
        data = json.loads(path.read_text())
    except json.JSONDecodeError as exc:
        raise ValueError(f"Profile {path.name}: invalid JSON - {exc}") from exc

    raw_modules = data.get("modules")
    if not isinstance(raw_modules, list):
        raise ValueError(f"Profile {path.name}: 'modules' must be a list")

    modules = normalize_modules(raw_modules, path)
    name = data.get("name") or path.stem
    label = sanitize(data.get("label")) or " ".join(part.capitalize() for part in name.replace("-", " ").split())
    description = sanitize(data.get("description"))

    order_raw = data.get("order")
    try:
        order = int(order_raw) if order_raw is not None else 10000
    except (TypeError, ValueError):
        raise ValueError(f"Profile {path.name}: 'order' must be an integer") from None

    return name, modules, label, description, order


def cmd_list(directory: Path) -> int:
    if not directory.is_dir():
        print(f"ERROR: Profile directory not found: {directory}", file=sys.stderr)
        return 1

    profiles: List[Tuple[str, List[str], str, str, int]] = []
    for candidate in sorted(directory.glob("*.json")):
        try:
            profiles.append(load_profile(candidate))
        except ValueError as exc:
            print(f"ERROR: {exc}", file=sys.stderr)
            return 1

    profiles.sort(key=lambda item: item[4])

    for name, modules, label, description, order in profiles:
        modules_csv = ",".join(modules)
        print("\t".join([name, modules_csv, label, description, str(order)]))
    return 0


COMMANDS = {
    "list": cmd_list,
}


def main(argv: List[str]) -> int:
    if len(argv) != 3:
        print(f"Usage: {argv[0]} <command> <profiles-dir>", file=sys.stderr)
        return 1

    command = argv[1]
    handler = COMMANDS.get(command)
    if handler is None:
        valid = ", ".join(sorted(COMMANDS))
        print(f"Unknown command '{command}'. Valid commands: {valid}", file=sys.stderr)
        return 1

    directory = Path(argv[2])
    return handler(directory)


if __name__ == "__main__":
    sys.exit(main(sys.argv))
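For reference, a minimal sketch of a profile document matching the shape described in the docstring above. The profile name and temporary path are made-up examples; MODULE_PLAYERBOTS is a module key that appears elsewhere in this commit.

# Illustrative only; fabricated profile file.
import sys
from pathlib import Path

sys.path.insert(0, str(Path("scripts/python")))
from setup_profiles import load_profile  # noqa: E402

profile = Path("/tmp/solo.json")
profile.write_text(
    '{"modules": ["MODULE_PLAYERBOTS"], "label": "Solo", '
    '"description": "Playerbots only", "order": 10}'
)
print(load_profile(profile))
# -> ('solo', ['MODULE_PLAYERBOTS'], 'Solo', 'Playerbots only', 10)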