# Released under the MIT License. See LICENSE for details.
#
"""A nice collection of ready-to-use pcommands for this package."""
from __future__ import annotations
# Note: import as little as possible here at the module level to
# keep launch times fast for small snippets.
import sys
from typing import TYPE_CHECKING
from efrotools import pcommand
if TYPE_CHECKING:
from libcst import BaseExpression
from libcst.metadata import CodeRange
#: Root under which ``test_game_run`` stores per-instance silo dirs.
#: Each silo is self-contained state for one running instance
#: (``ba_root/``, ``pid``, ``out``, etc.) so multiple instances can run
#: concurrently without stepping on each other.
_TEST_GAME_SILO_ROOT = 'build/test_run'
#: Default UDP port for newly-created server silos. Override with
#: ``--port`` at silo creation time (or edit the silo's ``config.toml``
#: after the fact) when running multiple servers concurrently.
_TEST_GAME_DEFAULT_PORT = 43210
def _test_game_run_parse_args() -> dict[str, object]:
"""Parse ``test_game_run`` argv.
Returns a dict with keys ``instance``, ``log_levels``, ``timeout``,
``fleet``, ``headless``, ``exec_code``, and ``port``. Default is a
headless server build; pass ``--gui`` for an interactive GUI
client build.
"""
args = sys.argv[2:]
out: dict[str, object] = {
'instance': 'default',
'log_levels': '',
'timeout': 10,
'fleet': '',
'headless': True,
'exec_code': None,
'port': None,
'ex_build': False,
'user_data': False,
}
while args:
if args[0] == '--instance' and len(args) > 1:
out['instance'] = args[1]
args = args[2:]
elif args[0] == '--log' and len(args) > 1:
out['log_levels'] = args[1]
args = args[2:]
elif args[0] == '--timeout' and len(args) > 1:
out['timeout'] = int(args[1])
args = args[2:]
elif args[0] == '--fleet' and len(args) > 1:
out['fleet'] = args[1]
args = args[2:]
elif args[0] == '--gui':
out['headless'] = False
args = args[1:]
elif args[0] == '--exec' and len(args) > 1:
out['exec_code'] = args[1]
args = args[2:]
elif args[0] == '--port' and len(args) > 1:
out['port'] = int(args[1])
args = args[2:]
elif args[0] == '--ex':
out['ex_build'] = True
args = args[1:]
elif args[0] == '--user-data':
out['user_data'] = True
args = args[1:]
else:
raise RuntimeError(f'Unexpected arg: {args[0]}')
if out['port'] is not None and not out['headless']:
raise RuntimeError('--port only applies to headless instances.')
if out['user_data'] and out['headless']:
raise RuntimeError(
'--user-data is GUI-only; headless has no per-user data dir.'
)
return out
def _test_game_silo_path(instance: str) -> str:
    """Return the absolute path of an instance's silo dir."""
    import os

    # Reject names that would escape the silo root or hide the dir.
    bad_name = not instance or '/' in instance or instance.startswith('.')
    if bad_name:
        raise RuntimeError(f'Invalid --instance name: {instance!r}')
    silo_dir = os.path.join(_TEST_GAME_SILO_ROOT, instance)
    return os.path.abspath(silo_dir)
def _test_game_write_server_config_toml(
path: str, *, instance: str, port: int
) -> None:
"""Write the per-silo ``config.toml`` for a fresh server instance."""
with open(path, 'w', encoding='utf-8') as f:
f.write(
'# Auto-generated by tools/pcommand test_game_run on first\n'
f'# creation of silo "{instance}". Edits you make here are\n'
'# preserved across subsequent launches of this silo.\n'
f'party_name = "test-{instance}"\n'
'party_is_public = false\n'
f'port = {port}\n'
'\n'
'# Crank verbosity on the loggers most useful for\n'
'# connect/handshake debugging.\n'
'[log_levels]\n'
'"ba.net" = "DEBUG"\n'
'"ba.lifecycle" = "DEBUG"\n'
)
def _test_game_ensure_binary(*, headless: bool, ex_build: bool) -> None:
    """Run the appropriate ``make`` target to bring the binary current.

    Lazybuild makes this a near-noop when nothing has changed, and
    ensures edits to ``.py`` / ``.cc`` sources actually land in the
    binary we're about to launch. Avoids a class of "why isn't my edit
    showing up" bugs where editors reach for ``make cmake-build`` but
    are actually running the headless binary (or vice versa).
    """
    import subprocess

    # Headless always uses the server target; otherwise pick the GUI
    # target variant based on the ex-build flag.
    if headless:
        target = 'cmake-server-build'
    else:
        target = 'cmake-build-ex' if ex_build else 'cmake-build'
    subprocess.run(['make', target], check=True)
def _test_game_ensure_server_silo(
    silo: str, *, instance: str, port: int | None
) -> None:
    """Create / refresh the server artifacts inside a silo dir.

    Creates ``ba_root/``, symlinks the ``ballisticakit_server`` wrapper
    + its ``dist/`` sibling from the staging dir, and writes a
    ``config.toml`` template on first use. Assumes the server build has
    already been brought current (see ``_test_game_ensure_binary``).

    Args:
      silo: Absolute path of the silo dir (see ``_test_game_silo_path``).
      instance: Silo name; embedded in any freshly-written config.
      port: Port to write into a newly-created ``config.toml``; ``None``
        selects the default. Ignored (with a note) when the silo's
        ``config.toml`` already exists.
    """
    import os
    staged = os.path.abspath('build/cmake/server-debug/staged')
    staged_wrapper = os.path.join(staged, 'ballisticakit_server')
    staged_dist = os.path.join(staged, 'dist')
    staged_headless = os.path.join(staged_dist, 'ballisticakit_headless')
    # Sanity-check that the build step actually produced the staged
    # artifacts we're about to link against.
    assert os.path.exists(
        staged_headless
    ), f'Headless binary missing at {staged_headless} after build.'
    assert os.path.exists(
        staged_wrapper
    ), f'Server wrapper missing at {staged_wrapper} after build.'
    os.makedirs(silo, exist_ok=True)
    os.makedirs(os.path.join(silo, 'ba_root'), exist_ok=True)
    # Symlink the wrapper + dist from staging. Refresh if the target
    # moved (shouldn't happen, but belt-and-suspenders for rebuilds).
    for name, target in (
        ('ballisticakit_server', staged_wrapper),
        ('dist', staged_dist),
    ):
        link_path = os.path.join(silo, name)
        if os.path.islink(link_path):
            # Existing symlink: keep it if it already points at the
            # current staging target; otherwise replace it below.
            if os.readlink(link_path) == target:
                continue
            os.remove(link_path)
        elif os.path.exists(link_path):
            # A real file/dir here means something outside this tool
            # created it; bail rather than destroy user data.
            raise RuntimeError(
                f'{link_path} exists and is not a symlink; '
                f'refusing to clobber. Remove it and retry.'
            )
        os.symlink(target, link_path)
    config_path = os.path.join(silo, 'config.toml')
    if not os.path.exists(config_path):
        # First run for this silo: write the config template, honoring
        # an explicit --port when one was given.
        _test_game_write_server_config_toml(
            config_path,
            instance=instance,
            port=(port if port is not None else _TEST_GAME_DEFAULT_PORT),
        )
    elif port is not None:
        # Silo already exists. Ignoring --port avoids clobbering the
        # user's edits to config.toml.
        print(
            f'Note: --port ignored for existing silo "{instance}" '
            f'(its config.toml is preserved across runs).'
        )
def _test_game_ensure_client_silo(silo: str) -> None:
"""Create minimal client-instance state inside a silo dir."""
import os
os.makedirs(silo, exist_ok=True)
os.makedirs(os.path.join(silo, 'ba_root'), exist_ok=True)
def _test_game_build_cmd(
*,
silo: str,
headless: bool,
exec_code: str | None,
ex_build: bool = False,
user_data: bool = False,
) -> tuple[str, list[str]]:
"""Return ``(cwd, cmd)`` for launching the binary against a silo.
If ``ex_build`` is True, target the ``cmake-build-ex`` staged
directory instead of the vanilla ``cmake-build`` one. The ex
build adds optional integrations (Discord SDK, etc.) that aren't
compiled into the default build — needed for automation of
Discord sign-in and similar flows.
If ``user_data`` is True (GUI only), omit ``--config-dir`` so the
binary uses its default per-user data dir — reproduces the
caller's real signed-in state, saved options, etc. instead of
a fresh silo.
"""
import os
if headless:
# Run the wrapper from inside the silo so its subprocess
# ``cwd='dist'`` resolves to the silo's dist symlink (which
# points back at the staging dist). --root and --config are
# absolute so they don't care about the wrapper's cwd. The
# wrapper's own --exec flag forwards to the underlying
# ballisticakit_headless subprocess.
cmd: list[str] = [
'./ballisticakit_server',
'--root',
os.path.join(silo, 'ba_root'),
'--config',
os.path.join(silo, 'config.toml'),
'--noninteractive',
]
if exec_code is not None:
cmd += ['--exec', exec_code]
return (silo, cmd)
# Client: run the GUI binary from its staging dir (so bundled
# ba_data is found) with --config-dir pointing at the silo's
# ba_root. No need to mirror the binary into the silo. Assumes
# the GUI build has already been brought current (see
# ``_test_game_ensure_binary``).
staged_subdir = 'debug-ex' if ex_build else 'debug'
staged = os.path.abspath(f'build/cmake/{staged_subdir}/staged')
binary = os.path.join(staged, 'ballisticakit')
assert os.path.exists(
binary
), f'GUI binary missing at {binary} after build.'
cmd = ['./ballisticakit']
if not user_data:
cmd += ['--config-dir', os.path.join(silo, 'ba_root')]
if exec_code is not None:
cmd += ['--exec', exec_code]
return (staged, cmd)
[docs]
def test_game_run() -> None:
    """Run the game for testing purposes in a siloed instance dir.

    Usage::

      tools/pcommand test_game_run [--instance NAME] [--gui] [--ex]
        [--port N] [--exec CODE] [--log LEVELS] [--timeout SECONDS]
        [--fleet FLEET] [--user-data]

    Always runs in the foreground and blocks until the process exits
    or ``--timeout`` elapses. Callers that want to supervise multiple
    concurrent instances, keep a server alive across sessions, or
    manage output on their own should wrap their own invocation (e.g.
    a terminal multiplexer, ``nohup ... &``, or the host automation's
    background-task mechanism) rather than relying on backgrounding
    baked into this command.

    Default is a headless server build; the headless binary starts
    faster, pops no window, and is the right fit for the programmatic
    testing this command is optimized for. Pass ``--gui`` when you
    actually want to interact with the UI.

    Every run invokes the appropriate ``make`` target first so edits
    to ``.py`` / ``.cc`` sources land in the binary we launch. Lazybuild
    makes this a near-noop when nothing changed, so there's no need to
    manually ``make cmake-build`` / ``make cmake-server-build`` before
    running this — the pcommand handles it.

    Each instance lives under ``build/test_run/<name>/`` with its own
    ``ba_root/`` and ``config.toml`` (server only). Silos auto-create
    on first use. Run two instances concurrently by passing different
    ``--instance`` names; give server instances distinct ``--port``
    values so they don't fight over UDP 43210.

    Flags:

    - ``--instance NAME``: Silo name under ``build/test_run/``.
      Defaults to ``default``.
    - ``--gui``: Launch the interactive GUI client build instead of
      the default headless server build.
    - ``--ex``: Target the ``cmake-build-ex`` staged build, which
      includes optional integrations (Discord SDK, etc.) that the
      default build omits. Only affects GUI launches; headless runs
      always use the server build.
    - ``--port N``: Only valid at silo creation time for headless
      instances. Writes the port into the silo's new ``config.toml``.
      Ignored (with a note) for pre-existing silos.
    - ``--exec CODE``: Python snippet passed via the binary's
      ``--exec`` flag. Works for both GUI and headless builds;
      for headless, ``connect_to_party`` calls are allowed in
      developer builds only (shipped builds hard-block this path).
    - ``--log LEVELS``: Comma-separated logger=LEVEL pairs (e.g.
      ``ba.net=DEBUG``). Passed via ``BA_LOG_LEVELS``.
    - ``--timeout SECONDS``: Hard timeout before the process is
      killed (default 10).
    - ``--fleet FLEET``: Override master-server fleet (``prod``,
      ``test``, ``dev``).
    - ``--user-data``: GUI only. Drop ``--config-dir`` so the binary
      uses its default per-user data dir — you see the caller's real
      signed-in state, saved options, unlocked characters, etc.
      instead of the silo's fresh slate. Handy for reproducing
      user-reported issues that only manifest with real account
      state. Incompatible with headless (servers have no user data
      dir in the same sense).
    """
    import os
    import signal
    import subprocess
    opts = _test_game_run_parse_args()
    instance = opts['instance']
    log_levels = opts['log_levels']
    timeout = opts['timeout']
    fleet = opts['fleet']
    headless = opts['headless']
    exec_code = opts['exec_code']
    port = opts['port']
    user_data = opts['user_data']
    # Narrow the loosely-typed options dict for the type checker.
    assert isinstance(instance, str)
    assert isinstance(log_levels, str)
    assert isinstance(timeout, int)
    assert isinstance(fleet, str)
    assert isinstance(headless, bool)
    assert isinstance(user_data, bool)
    assert exec_code is None or isinstance(exec_code, str)
    assert port is None or isinstance(port, int)
    # Bring the binary current before touching the silo. Lazybuild
    # makes this near-instant on the noop case and eliminates the
    # stale-binary class of bug where a .py/.cc edit regenerates the
    # .inc but the binary we're about to launch was built before that.
    _test_game_ensure_binary(headless=headless, ex_build=bool(opts['ex_build']))
    silo = _test_game_silo_path(instance)
    if headless:
        _test_game_ensure_server_silo(silo, instance=instance, port=port)
    else:
        _test_game_ensure_client_silo(silo)
    # Start with a clean screenshots dir per launch so automation
    # output always reflects the current session; screenshots don't
    # accumulate across reruns of the same silo.
    if os.path.isdir(os.path.join(silo, 'screenshots')):
        import shutil
        shutil.rmtree(os.path.join(silo, 'screenshots'))
    cwd, cmd = _test_game_build_cmd(
        silo=silo,
        headless=headless,
        exec_code=exec_code,
        ex_build=bool(opts['ex_build']),
        user_data=user_data,
    )
    env = dict(os.environ)
    if log_levels:
        env['BA_LOG_LEVELS'] = log_levels
    if fleet:
        env['BA_FLEET'] = fleet
    # Hand the silo's automation FIFO path to the binary; it'll create
    # the fifo on startup if it doesn't exist and start a reader thread.
    # See src/ballistica/base/automation/automation.h. Ignored entirely
    # in non-developer builds.
    env['BA_AUTOMATION_FIFO'] = os.path.join(silo, 'cmd.fifo')
    # Default to loopback-only UDP binding when the runtime proxy
    # pattern of Claude Code's sandbox is present. That sandbox denies
    # 0.0.0.0 UDP binds, and forcing callers to set this env var
    # themselves changes the command signature and triggers extra
    # permission prompts. setdefault so an explicit override (including
    # BA_BIND_LOOPBACK_ONLY=0) still wins.
    all_proxy = os.environ.get('ALL_PROXY', '')
    if all_proxy.startswith(('socks5://', 'socks5h://')):
        env.setdefault('BA_BIND_LOOPBACK_ONLY', '1')
    # Put the child in its own session so a ``--timeout`` SIGTERM can
    # reach the wrapper + headless subprocess together via killpg,
    # without risking signaling ourselves. Inherit stdout/stderr so
    # output streams through in real time; callers that want the
    # output in a file should redirect at invocation site.
    with subprocess.Popen(
        cmd,
        cwd=cwd,
        stdin=subprocess.DEVNULL,
        env=env,
        start_new_session=True,
    ) as proc:
        try:
            proc.wait(timeout=timeout)
        except subprocess.TimeoutExpired:
            # Timed out: terminate the whole process group; fall back
            # to signaling just the child if the group is already gone.
            try:
                os.killpg(os.getpgid(proc.pid), signal.SIGTERM)
            except ProcessLookupError:
                proc.send_signal(signal.SIGTERM)
            try:
                proc.wait(timeout=5)
            except subprocess.TimeoutExpired:
                # Still alive after SIGTERM grace period; force-kill.
                proc.kill()
                proc.wait()
[docs]
def compose_docker_gui_release() -> None:
    """Build the docker image with bombsquad cmake gui."""
    from batools.docker import docker_compose

    docker_compose(headless_build=False)
[docs]
def compose_docker_gui_debug() -> None:
    """Build the docker image with bombsquad debug cmake gui."""
    from batools.docker import docker_compose

    docker_compose(headless_build=False, build_type='Debug')
[docs]
def compose_docker_server_release() -> None:
    """Build the docker image with bombsquad cmake server."""
    from batools.docker import docker_compose

    docker_compose()
[docs]
def compose_docker_server_debug() -> None:
    """Build the docker image with bombsquad debug cmake server."""
    from batools.docker import docker_compose

    docker_compose(build_type='Debug')
[docs]
def compose_docker_arm64_gui_release() -> None:
    """Build the docker image with bombsquad cmake for arm64."""
    from batools.docker import docker_compose

    docker_compose(headless_build=False, platform='linux/arm64')
[docs]
def compose_docker_arm64_gui_debug() -> None:
    """Build the docker image with bombsquad cmake for arm64."""
    from batools.docker import docker_compose

    docker_compose(
        headless_build=False, platform='linux/arm64', build_type='Debug'
    )
[docs]
def compose_docker_arm64_server_release() -> None:
    """Build the docker image with bombsquad cmake server for arm64."""
    from batools.docker import docker_compose

    docker_compose(platform='linux/arm64')
[docs]
def compose_docker_arm64_server_debug() -> None:
    """Build the docker image with bombsquad cmake server for arm64."""
    from batools.docker import docker_compose

    docker_compose(platform='linux/arm64', build_type='Debug')
[docs]
def save_docker_images() -> None:
    """Saves bombsquad images loaded into docker."""
    from batools.docker import docker_save_images

    docker_save_images()
[docs]
def remove_docker_images() -> None:
    """Remove the bombsquad images loaded in docker."""
    from batools.docker import docker_remove_images

    docker_remove_images()
# pylint: disable=too-many-locals,too-many-statements
[docs]
def generate_flathub_manifest() -> None:
    """Generate a Flathub manifest for Ballistica and push to submodule.

    This function is intended to be run within a GitHub Actions workflow.

    This function:
    1. Copies files from config/flatpak/ to config/flatpak/flathub
    2. Generates the manifest from template using latest GitHub release info
    """
    import json
    import os
    import shutil
    import subprocess
    import urllib.request

    from efro.error import CleanError
    from efro.terminal import Clr

    # Determine 'owner/repo': prefer the standard GitHub Actions env
    # var; fall back to parsing the git remote for local runs.
    try:
        github_repo = os.environ['GITHUB_REPOSITORY']
    except KeyError:
        try:
            user_plus_repo: list[str] = (
                subprocess.run(
                    'git config remote.origin.url',
                    check=True,
                    shell=True,
                    capture_output=True,
                    text=True,
                )
                .stdout.strip(' \n')
                .split('/')
            )
            github_repo = (
                user_plus_repo[-2]
                + '/'
                + user_plus_repo[-1].removesuffix('.git')
            )
        except Exception as e:
            # (Fixed: the original message concatenated fragments
            # without separating spaces, producing 'notset...'.)
            raise CleanError(
                'GITHUB_REPOSITORY env var not set and'
                f' git remote.origin.url not set. {e}'
            ) from e
    # Paths
    flatpak_src_dir = os.path.join(pcommand.PROJROOT, 'config', 'flatpak')
    flathub_dir = os.path.join(pcommand.PROJROOT, 'build', 'flathub')
    template_path = os.path.join(
        flatpak_src_dir, 'net.froemling.bombsquad.yml.template'
    )
    os.makedirs(flathub_dir, exist_ok=True)
    manifest_path = os.path.join(flathub_dir, 'net.froemling.bombsquad.yml')
    print(f'{Clr.BLD}Generating Flathub manifest...{Clr.RST}')
    # Step 1: Copy files from config/flatpak/ to config/flatpak/flathub
    print(
        f'{Clr.BLD}Copying files from {flatpak_src_dir} to '
        f'{flathub_dir}...{Clr.RST}'
    )
    # List of files to copy (skip the flathub directory itself)
    files_to_copy = [
        'net.froemling.bombsquad.metainfo.xml',
        'net.froemling.bombsquad.desktop',
        'net.froemling.bombsquad.releases.xml',
    ]
    for filename in files_to_copy:
        src = os.path.join(flatpak_src_dir, filename)
        dst = os.path.join(flathub_dir, filename)
        if os.path.exists(src):
            shutil.copy2(src, dst)
            # (Fixed: these messages previously printed the literal
            # text '(unknown)' instead of the file name.)
            print(f' Copied {filename}')
        else:
            print(f' Warning: {filename} not found at {src}')
    # Step 2: Get latest release information from GitHub
    print(f'{Clr.BLD}Fetching latest GitHub release info...{Clr.RST}')
    try:
        api_url = f'https://api.github.com/repos/{github_repo}/releases/latest'
        req = urllib.request.Request(api_url)
        with urllib.request.urlopen(req) as response:
            release_data = json.loads(response.read().decode())
        # Find the bombsquad_build_env.tar asset
        asset: dict = {}
        asset_url = None
        asset_name = 'bombsquad_build_env.tar'
        for asset in release_data.get('assets', []):
            if asset['name'] == asset_name:
                asset_url = asset['browser_download_url']
                break
        if not asset_url:
            raise CleanError(
                f'Could not find {asset_name} in latest release assets'
            )
        print(f' Found asset: {asset_url}')
        # Extract version from release tag
        version = release_data.get('tag_name', '').lstrip('v')
        if not version:
            raise CleanError('Could not extract version from release tag')
        print(f' Release version: {version}')
        # Extract release date from published_at field
        release_date = release_data.get('published_at', '')
        if not release_date:
            raise CleanError('Could not extract release date from API')
        # Convert ISO format date (e.g., '2026-01-25T12:34:56Z')
        # to YYYY-MM-DD
        release_date = release_date.split('T')[0]
        print(f' Release date: {release_date}')
        print(f'{Clr.BLD}Getting SHA256 checksum...{Clr.RST}')
        # ``asset`` still holds the matched asset from the loop above.
        digest = asset.get('digest')
        if not digest or not digest.startswith('sha256:'):
            msg = 'No SHA256 digest found in GitHub release asset'
            raise CleanError(msg)
        checksum = digest.split(':', 1)[1]
    except Exception as e:
        raise CleanError(f'Failed to fetch release info: {e}') from e
    print(f'{Clr.BLD}Generating manifest from template...{Clr.RST}')
    with open(template_path, 'r', encoding='utf-8') as infile:
        template = infile.read()

    def _remove_comments_from_xml_template(content: str) -> str:
        """Strip full-line '#' comments (the line and its newline)."""
        import re

        # Pattern matches lines that start with optional spaces/tabs
        # then '#'; removes the entire line including the newline.
        pattern = r'^\s*#.*$\n?'
        return re.sub(pattern, '', content, flags=re.MULTILINE)

    template = _remove_comments_from_xml_template(template)
    # Replace placeholders
    manifest_content = template.replace('{ ARCHIVE_URL }', asset_url)
    manifest_content = manifest_content.replace('{ SHA256_CHECKSUM }', checksum)
    with open(manifest_path, 'w', encoding='utf-8') as outfile:
        outfile.write(manifest_content)
    print(f' Generated manifest at {manifest_path}')
    # Call generate_flatpak_release_manifest with
    # the extracted version, repo URL, and date
    print(f'{Clr.BLD}Generating Flatpak release manifest...{Clr.RST}')
    generate_flatpak_release_manifest(
        version, asset_url, checksum, github_repo, release_date
    )
    print(f'{Clr.BLD}{Clr.GRN}Flathub manifest generation complete!{Clr.RST}')
# pylint: disable=too-many-locals
[docs]
def generate_flatpak_release_manifest(
    version: str,
    asset_url: str,
    checksum: str,
    github_repo: str,
    release_date: str,
) -> None:
    """Generate a Flatpak release manifest for Ballistica.

    This function:
    1. Adds a new release entry to net.froemling.bombsquad.releases.xml
    2. Updates the net.froemling.bombsquad.releases.xml file with the
       new release information

    Args:
        version: Version string from GitHub release (e.g., '1.7.60')
        asset_url: URL to the release asset
        checksum: SHA256 checksum of the release asset
        github_repo: GitHub repository in format 'owner/repo'
        release_date: Release date in YYYY-MM-DD format
    """
    import os
    from xml.etree import ElementTree as ET

    from efro.error import CleanError
    from efro.terminal import Clr
    from batools.changelog import get_version_changelog

    # Paths
    flathub_dir = os.path.join(pcommand.PROJROOT, 'build', 'flathub')
    releases_xml_path = os.path.join(
        flathub_dir, 'net.froemling.bombsquad.releases.xml'
    )
    print(f'{Clr.BLD}Adding release {version} to releases.xml...{Clr.RST}')
    # Parse the existing releases.xml
    if not os.path.exists(releases_xml_path):
        raise CleanError(f'releases.xml not found at {releases_xml_path}')
    try:
        tree = ET.parse(releases_xml_path)
        root = tree.getroot()
    except ET.ParseError as e:
        raise CleanError(f'Failed to parse releases.xml: {e}') from e
    # Check if release with this version already exists
    existing_release = root.find(f".//release[@version='{version}']")
    if existing_release is not None:
        print(
            f'{Clr.YLW}Warning: Release {version} '
            f'already exists in releases.xml, skipping...{Clr.RST}'
        )
        return
    # Create new release element
    new_release = ET.Element('release')
    new_release.set('version', version)
    new_release.set('date', release_date)
    new_release.set('urgency', 'low')
    new_release.set('type', 'stable')
    # Add description
    description = ET.SubElement(new_release, 'description')
    changelog_list = get_version_changelog(
        version, projroot=str(pcommand.PROJROOT)
    )
    ul = ET.SubElement(description, 'ul')
    for line in changelog_list:
        li = ET.SubElement(ul, 'li')
        li.text = line
    # Add URL element for release page
    release_url = ET.SubElement(new_release, 'url')
    release_url.text = (
        f'https://github.com/{github_repo}/releases/tag/v{version}'
    )
    # Add artifacts section with binary information
    artifacts = ET.SubElement(new_release, 'artifacts')
    # Add source artifact
    source_artifact = ET.SubElement(artifacts, 'artifact')
    source_artifact.set('type', 'source')
    source_location = ET.SubElement(source_artifact, 'location')
    source_location.text = (
        f'https://github.com/{github_repo}/archive/refs/tags/v{version}.tar.gz'
    )
    # Add binary artifact for linux.
    binary_artifact = ET.SubElement(artifacts, 'artifact')
    # AppStream artifact type is 'binary' here; the previous 'source'
    # value was a copy/paste slip from the source artifact above.
    binary_artifact.set('type', 'binary')
    binary_artifact.set('platform', 'x86_64-linux-gnu')
    binary_location = ET.SubElement(binary_artifact, 'location')
    binary_location.text = asset_url
    binary_checksum = ET.SubElement(binary_artifact, 'checksum')
    binary_checksum.set('type', 'sha256')
    binary_checksum.text = checksum
    # Insert the new release at the beginning (after the root element)
    root.insert(0, new_release)

    # Format the XML with proper indentation
    def _indent(elem: ET.Element[str], level: int = 0) -> None:
        """Add pretty-printing indentation to XML tree."""
        indent_str = '\n' + (' ' * level)
        if len(elem):
            if not elem.text or not elem.text.strip():
                elem.text = indent_str + ' '
            if not elem.tail or not elem.tail.strip():
                elem.tail = indent_str
            child: ET.Element | None = None
            for child in elem:
                _indent(child, level + 1)
            # Explicit None check: truth-testing an Element is
            # deprecated and is False for childless elements, which
            # previously left the last child's tail unadjusted.
            if child is not None and (
                not child.tail or not child.tail.strip()
            ):
                child.tail = indent_str
        else:
            if level and (not elem.tail or not elem.tail.strip()):
                elem.tail = indent_str

    _indent(root)
    # Write back to file
    try:
        tree.write(releases_xml_path, encoding='utf-8', xml_declaration=True)
        print(f' Added release {version} to releases.xml')
        print(f' Generated flatpak release manifest at {releases_xml_path}')
        print(
            f'{Clr.BLD}{Clr.GRN}Flatpak release manifest '
            f'generation complete!{Clr.RST}'
        )
    except Exception as e:
        raise CleanError(f'Failed to write releases.xml: {e}') from e
[docs]
def gen_pyembed() -> None:
    """Gen a pyembed .inc file using compiled bytecode.

    Args: <in_path> <out_path> [encrypt={0,1}] [ctx=<var_name>]

    Replaces gen_encrypted_python_code (encrypt=1) and
    gen_flat_data_code (encrypt=0) for pyembed modules.
    """
    from efro.error import CleanError
    from batools.meta import gen_pyembed as gen

    if len(sys.argv) < 4:
        raise CleanError(
            'Expected at least 2 args: <in_path> <out_path> '
            '[encrypt={0,1}] [ctx=<var>]'
        )
    encrypt = True
    ctx_var = 'internal_py_context'
    # Optional trailing args may override the defaults above.
    for extra in sys.argv[4:]:
        if extra.startswith('encrypt='):
            encrypt = extra.removeprefix('encrypt=') == '1'
            continue
        if extra.startswith('ctx='):
            ctx_var = extra.removeprefix('ctx=')
            continue
        raise CleanError(f'Unrecognized arg: {extra!r}')
    gen(
        projroot=str(pcommand.PROJROOT),
        in_path=sys.argv[2],
        out_path=sys.argv[3],
        encrypt=encrypt,
        ctx_var=ctx_var,
    )
# Docs-generation hack; import some stuff that we likely only forward-declared
# in our actual source code so that docs tools can find it.
from typing import (Coroutine, Any, Literal, Callable,
Generator, Awaitable, Sequence, Self)
import asyncio
from concurrent.futures import Future
from pathlib import Path
from enum import Enum