Source code for batools.metamakefile
# Released under the MIT License. See LICENSE for details.
#
"""Procedurally regenerates our code Makefile.
This Makefiles builds our generated code such as encrypted python strings,
node types, etc).
"""
from __future__ import annotations
import os
import json
from pathlib import Path
from typing import TYPE_CHECKING
from dataclasses import dataclass
from efro.error import CleanError
from efrotools.project import getprojectconfig
if TYPE_CHECKING:
pass
# These paths need to be relative to the dir we're writing the Makefile to.
PROJ_DIR = '../..'
TOOLS_DIR = f'{PROJ_DIR}/tools'
PROJ_SRC_DIR = '..'
# These should only be used for make targets since they use makefile
# vars. Our makefile vars have the same names as above. We could inline
# vars ourselves, but it's nice to build a makefile that feels like one
# we'd build by hand.
OUT_DIR_ROOT_CPP = '$(PROJ_SRC_DIR)/ballistica'
OUT_DIR_BASE_PYTHON = '$(PROJ_SRC_DIR)/assets/ba_data/python/babase/_mgen'
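# Worked example of how these resolve (informational only): the generated
# Makefile lives in src/meta, so PROJ_SRC_DIR ('..') points at src/ and,
# once the Makefile substitutes its vars, OUT_DIR_BASE_PYTHON ends up as
# src/assets/ba_data/python/babase/_mgen relative to the project root.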
@dataclass
class Target:
"""A target to be added to the Makefile."""
src: list[str]
dst: str
cmd: str
mkdir: bool = False
def emit(self) -> str:
"""Gen a Makefile target."""
out: str = self.dst.replace(' ', '\\ ')
out += (
' : '
+ ' '.join(self.src)
+ (
('\n\t@mkdir -p "' + os.path.dirname(self.dst) + '"')
if self.mkdir
else ''
)
+ '\n\t@'
+ self.cmd
+ '\n'
)
return out
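# Illustrative sketch (hypothetical paths, not real project targets): a
# Target(src=['foo/pyembed/bar.py'], dst='out/pyembed/bar.inc',
#        cmd='$(PCOMMAND) gen_binding_code $< $@', mkdir=True)
# would emit() a Makefile rule of roughly this shape (recipe lines are
# tab-indented in the actual output):
#
#   out/pyembed/bar.inc : foo/pyembed/bar.py
#       @mkdir -p "out/pyembed"
#       @$(PCOMMAND) gen_binding_code $< $@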
def generate_meta_makefile(projroot: str, existing_data: str) -> dict[str, str]:
"""Update the project meta Makefile.
Returns file names and contents.
"""
return MetaMakefileGenerator(projroot, existing_data).run()
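# Minimal usage sketch (assumed caller; not part of this module). The
# returned dict maps project-relative filenames to their new contents,
# so a caller might apply the results along these lines:
#
#   makefile_path = os.path.join(projroot, 'src/meta/Makefile')
#   with open(makefile_path, encoding='utf-8') as infile:
#       existing = infile.read()
#   results = generate_meta_makefile(projroot, existing)
#   for relpath, contents in results.items():
#       with open(os.path.join(projroot, relpath), 'w',
#                 encoding='utf-8') as outfile:
#           outfile.write(contents)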
class MetaMakefileGenerator:
"""Thing that does the thing."""
def __init__(self, projroot: str, existing_data: str) -> None:
from batools.featureset import FeatureSet
self._existing_data = existing_data
self._projroot = projroot
self._featuresets = FeatureSet.get_all_for_project(projroot)
def run(self) -> dict[str, str]:
"""Do the thing."""
# pylint: disable=too-many-locals
public = getprojectconfig(Path(self._projroot))['public']
assert isinstance(public, bool)
fname = 'src/meta/Makefile'
fname_pub_man = 'src/meta/.meta_manifest_public.json'
fname_priv_man = 'src/meta/.meta_manifest_private.json'
original = self._existing_data
lines = original.splitlines()
# We'll generate manifests of all public/private files we
# generate (not private-internal though).
all_dsts_public: set[str] = set()
all_dsts_private: set[str] = set()
auto_start_public = lines.index('# __AUTOGENERATED_PUBLIC_BEGIN__')
auto_end_public = lines.index('# __AUTOGENERATED_PUBLIC_END__')
auto_start_private = lines.index('# __AUTOGENERATED_PRIVATE_BEGIN__')
auto_end_private = lines.index('# __AUTOGENERATED_PRIVATE_END__')
# Public targets (stuff with full sources available in public
# repo).
targets: list[Target] = []
pubtargets = targets
self._add_monolithic_register_modules_target(targets)
self._add_pyembed_targets(targets)
# Base feature set bits.
if os.path.exists(
f'{self._projroot}/config/featuresets/featureset_base.py'
):
self._add_init_module_target(targets, moduledir=OUT_DIR_BASE_PYTHON)
self._add_base_enums_module_target(targets)
our_lines_public = (
_empty_line_if(bool(targets))
+ self._emit_sources_lines(targets)
+ [t.emit() for t in targets]
)
all_dsts_public.update(t.dst for t in targets)
# Only rewrite the private section in the private repo;
# otherwise keep the existing one intact.
if public:
our_lines_private = lines[auto_start_private + 1 : auto_end_private]
else:
# Private targets (but available in public through
# efrocache).
targets = []
our_lines_private_1 = (
_empty_line_if(bool(targets))
+ self._emit_sources_lines(targets)
+ ['# __EFROCACHE_TARGET__\n' + t.emit() for t in targets]
+ [
'\n#'
' Note: we include our public targets in efrocache even\n'
'# though they are buildable in public. This allows us to\n'
'# fetch them to bootstrap binary builds in cases where\n'
'# we can\'t use our full Makefiles (like Windows CI).\n'
]
+ self._emit_efrocache_lines(pubtargets + targets)
)
all_dsts_private.update(t.dst for t in targets)
# Private-internal targets (not available at all in public).
targets = []
self._add_pyembed_targets_internal(targets)
self._add_extra_targets_internal(targets)
our_lines_private_2 = (
['# __PUBSYNC_STRIP_BEGIN__']
+ _empty_line_if(bool(targets))
+ self._emit_sources_lines(targets)
+ [t.emit() for t in targets]
+ ['# __PUBSYNC_STRIP_END__']
)
our_lines_private = our_lines_private_1 + our_lines_private_2
filtered = (
lines[: auto_start_public + 1]
+ our_lines_public
+ lines[auto_end_public : auto_start_private + 1]
+ our_lines_private
+ lines[auto_end_private:]
)
out = '\n'.join(filtered) + '\n'
out_files: dict[str, str] = {}
out_pub_man = json.dumps(
sorted(self._filter_manifest_path(p) for p in all_dsts_public),
indent=1,
)
out_priv_man = json.dumps(
sorted(self._filter_manifest_path(p) for p in all_dsts_private),
indent=1,
)
out_files[fname] = out
out_files[fname_pub_man] = out_pub_man
out_files[fname_priv_man] = out_priv_man
return out_files
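# For reference, each manifest written above is just a sorted JSON list
# of project-relative output paths (json.dumps with indent=1), e.g. an
# illustrative public entry:
#
#   [
#    "src/assets/ba_data/python/babase/_mgen/enums.py"
#   ]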
def _emit_sources_lines(self, targets: list[Target]) -> list[str]:
"""Gen lines to build provided targets."""
out: list[str] = []
if not targets:
return out
all_dsts = set()
for target in targets:
all_dsts.add(target.dst)
out.append(
'sources: \\\n '
+ ' \\\n '.join(
dst.replace(' ', '\\ ') for dst in sorted(all_dsts)
)
+ '\n'
)
return out
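# Illustrative output (hypothetical dst paths): this produces a single
# aggregate rule such as
#
#   sources: \
#     $(PROJ_SRC_DIR)/ballistica/foo/mgen/pyembed/a.inc \
#     $(PROJ_SRC_DIR)/ballistica/foo/mgen/pyembed/b.inc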
def _emit_efrocache_lines(self, targets: list[Target]) -> list[str]:
"""Gen lines to cache provided targets."""
out: list[str] = []
if not targets:
return out
all_dsts = set()
for target in targets:
# We may need to make pipeline adjustments if/when we get
# filenames with spaces in them.
if ' ' in target.dst:
raise CleanError(
'FIXME: need to account for spaces in filename'
f' "{target.dst}".'
)
all_dsts.add(target.dst)
out.append(
'efrocache-list:\n\t@echo '
+ ' \\\n '.join('"' + dst + '"' for dst in sorted(all_dsts))
+ '\n'
)
out.append('efrocache-build: sources\n')
return out
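# Illustrative output (hypothetical dst paths; recipe line tab-indented):
#
#   efrocache-list:
#       @echo "path/to/a.inc" \
#           "path/to/b.inc"
#
#   efrocache-build: sources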
def _add_base_enums_module_target(self, targets: list[Target]) -> None:
targets.append(
Target(
src=[
'$(PROJ_DIR)/src/ballistica/shared/foundation/types.h',
'$(TOOLS_DIR)/batools/enumspython.py',
],
dst=os.path.join(OUT_DIR_BASE_PYTHON, 'enums.py'),
cmd='$(PCOMMAND) gen_python_enums_module $< $@',
)
)
def _add_init_module_target(
self, targets: list[Target], moduledir: str
) -> None:
targets.append(
Target(
src=['$(TOOLS_DIR)/batools/pcommands.py'],
dst=os.path.join(moduledir, '__init__.py'),
cmd='$(PCOMMAND) gen_python_init_module $@',
)
)
def _add_monolithic_register_modules_target(
self, targets: list[Target]
) -> None:
# When any of our featuresets configs changes, rebuild our
# snippet of code that registers them all.
featureset_fnames = [
n
for n in os.listdir(
os.path.join(self._projroot, 'config/featuresets')
)
if n.startswith('featureset_') and n.endswith('.py')
]
targets.append(
Target(
src=[
f'$(PROJ_DIR)/config/featuresets/{n}'
for n in sorted(featureset_fnames)
],
dst=f'{OUT_DIR_ROOT_CPP}/core/mgen/python_modules_monolithic.h',
cmd='$(PCOMMAND) gen_monolithic_register_modules $@',
)
)
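# For example, with config/featuresets containing featureset_base.py and
# a hypothetical featureset_foo.py, the emitted rule lists both config
# files as prerequisites of
# $(PROJ_SRC_DIR)/ballistica/core/mgen/python_modules_monolithic.h, so
# that header is regenerated whenever any featureset config changes.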
def _add_featureset_entries(
self, entries: list[tuple[str, str]], internal: bool
) -> None:
featuresets = [f for f in self._featuresets if internal == f.internal]
# For featureset 'foo_bar', stuff under 'bafoobarmeta' goes into
# 'ballistica/foo_bar/mgen'.
for featureset in featuresets:
entries.append(
(
featureset.name_python_package_meta,
os.path.join(OUT_DIR_ROOT_CPP, featureset.name, 'mgen'),
)
)
def _create_featureset_targets(
self,
entries: list[tuple[str, str]],
targets: list[Target],
internal: bool,
) -> None:
for pkg, out_dir in entries:
base_src_dir = os.path.join(self._projroot, f'src/meta/{pkg}')
if not os.path.exists(base_src_dir):
continue
# Note: sort to keep things deterministic.
for fname in sorted(os.listdir(f'{base_src_dir}/pyembed')):
if (
not fname.endswith('.py')
or fname == '__init__.py'
or 'flycheck' in fname
):
continue
name = os.path.splitext(fname)[0]
src = [
f'{pkg}/pyembed/{name}.py',
]
dst = os.path.join(out_dir, 'pyembed', f'{name}.inc')
if name.startswith('binding_'):
targets.append(
Target(
src=src,
dst=dst,
cmd='$(PCOMMAND) gen_binding_code $< $@',
)
)
else:
if internal:
targets.append(
Target(
src=src,
dst=dst,
cmd=(
'$(PCOMMAND) gen_encrypted_python_code'
' $< $@'
),
)
)
else:
targets.append(
Target(
src=src,
dst=dst,
cmd=f'$(PCOMMAND) gen_flat_data_code'
f' $< $@ {name}_code',
)
)
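# Illustrative mapping (hypothetical featureset 'foo_bar', per the naming
# convention noted above): a meta source file
#   src/meta/bafoobarmeta/pyembed/binding_foo_bar.py
# gets a target
#   $(PROJ_SRC_DIR)/ballistica/foo_bar/mgen/pyembed/binding_foo_bar.inc
# built via gen_binding_code; non-binding modules instead use
# gen_encrypted_python_code (internal featuresets) or gen_flat_data_code
# (public ones).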
def _add_pyembed_targets(self, targets: list[Target]) -> None:
entries: list[tuple[str, str]] = []
# Map stuff from other featureset meta packages to a mgen dir
# under their C++ root.
self._add_featureset_entries(entries, internal=False)
self._create_featureset_targets(entries, targets, internal=False)
def _add_pyembed_targets_internal(self, targets: list[Target]) -> None:
entries: list[tuple[str, str]] = []
self._add_featureset_entries(entries, internal=True)
self._create_featureset_targets(entries, targets, internal=True)
def _add_extra_targets_internal(self, targets: list[Target]) -> None:
if os.path.exists(
f'{self._projroot}/config/featuresets/featureset_plus.py'
):
# Add targets to generate message sender/receiver classes
# for our basn/client protocols. Their outputs go to 'mgen'
# so they don't get added to git.
self._add_init_module_target(targets, moduledir='baplusmeta/mgen')
for srcname, dstname, gencmd in [
('batocloud', 'basnmessagesender', 'gen_basn_msg_sender'),
('cloudtoba', 'basnmessagereceiver', 'gen_basn_msg_receiver'),
]:
targets.append(
Target(
src=[f'baplusmeta/pyembed/{srcname}.py'],
dst=f'baplusmeta/mgen/{dstname}.py',
cmd=f'$(PCOMMAND) {gencmd} $@',
)
)
# Now add explicit targets to generate embedded code for the
# resulting classes. We can't simply place them in a scanned
# dir like pyembed because they might not exist yet at
# update time.
for name in ['basnmessagesender', 'basnmessagereceiver']:
targets.append(
Target(
src=[f'baplusmeta/mgen/{name}.py'],
dst=os.path.join(
OUT_DIR_ROOT_CPP,
'plus',
'mgen',
'pyembed',
f'{name}.inc',
),
cmd='$(PCOMMAND) gen_encrypted_python_code $< $@',
)
)
def _filter_manifest_path(self, path: str) -> str:
"""Given a path we dumped into our makefile, generate an abs one."""
# Our makefile paths contain vars to be subbed by the makefile.
# We need to do those same subs now.
for pair in [
('$(PROJ_DIR)', PROJ_DIR),
('$(TOOLS_DIR)', TOOLS_DIR),
('$(PROJ_SRC_DIR)', PROJ_SRC_DIR),
]:
path = path.replace(pair[0], pair[1])
projpath = f'{self._projroot}/'
assert '\\' not in projpath  # Don't expect to work on Windows.
abspath = os.path.abspath(
os.path.join(self._projroot, 'src', 'meta', path)
)
if not abspath.startswith(projpath):
raise RuntimeError(
f'Path "{abspath}" is not under project root "{projpath}"'
)
return abspath[len(projpath) :]
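# Worked example: the makefile dst
# '$(PROJ_SRC_DIR)/ballistica/core/mgen/python_modules_monolithic.h'
# becomes '../ballistica/core/mgen/python_modules_monolithic.h', is joined
# onto <projroot>/src/meta, and collapses to the project-relative path
# 'src/ballistica/core/mgen/python_modules_monolithic.h'.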
def _empty_line_if(condition: bool) -> list[str]:
return [''] if condition else []