# Source code for batools.pcommands2

# Released under the MIT License. See LICENSE for details.
#
"""A nice collection of ready-to-use pcommands for this package."""

from __future__ import annotations

# Note: import as little as possible here at the module level to
# keep launch times fast for small snippets.
import sys
from typing import TYPE_CHECKING

from efrotools import pcommand

if TYPE_CHECKING:
    from libcst import BaseExpression
    from libcst.metadata import CodeRange


def gen_builtin_asset_ids() -> None:
    """Splice C++ id-enums / load-block into base.h / assets.cc.

    Reads the cached bundle manifest under ``.cache/asset_bundle/gui/``
    and splices generated content into the
    ``// __AUTOGENERATED_BUILTIN_ASSET_IDS_*__`` autogen section in
    ``src/ballistica/base/base.h`` and the
    ``// __AUTOGENERATED_BUILTIN_ASSET_LOAD_*__`` section inside
    ``Assets::StartLoading()`` in
    ``src/ballistica/base/assets/assets.cc``.

    Idempotent — only rewrites a file if its spliced content differs
    from on-disk. Normally invoked indirectly via ``make update`` (this
    is the one-phase-that-can-modify-checked-in-files per the build
    system design); standalone invocation exists for manual regen.

    Pass ``--check`` to fail (without writing) if either file would
    change.
    """
    from pathlib import Path

    from batools.builtinassetids import generate

    # In check mode we report differences instead of writing them.
    check = '--check' in sys.argv

    changed = generate(Path(pcommand.PROJROOT), check=check)
    if check and changed:
        from efro.error import CleanError

        raise CleanError(
            'Builtin-asset id autogen sections are out of date; '
            "run 'make update' to regenerate."
        )
def gen_monolithic_register_modules() -> None:
    """Generate .h file for registering py modules."""
    import os
    import textwrap

    from efro.error import CleanError

    from batools.featureset import FeatureSet

    if len(sys.argv) != 3:
        raise CleanError('Expected 1 arg.')
    outpath = sys.argv[2]

    featuresets = FeatureSet.get_all_for_project(str(pcommand.PROJROOT))

    # Filter out ones without native modules.
    featuresets = [f for f in featuresets if f.has_python_binary_module]

    pymodulenames = sorted(f.name_python_binary_module for f in featuresets)

    def initname(mname: str) -> str:
        # plus is a special case since we need to define that symbol
        # ourself.
        return f'DoPyInit_{mname}' if mname == '_baplus' else f'PyInit_{mname}'

    extern_def_code = '\n'.join(
        f'auto {initname(n)}() -> PyObject*;' for n in pymodulenames
    )

    py_register_code = '\n'.join(
        f'PyImport_AppendInittab("{n}", &{initname(n)});'
        for n in pymodulenames
    )

    if '_baplus' in pymodulenames:
        init_plus_code = (
            '\n'
            '// Slight hack: because we are currently building baplus as a'
            ' static module\n'
            '// and linking it in, symbols exported there (namely'
            ' PyInit__baplus) do not\n'
            '// seem to be available through us when we are compiled as'
            ' a dynamic\n'
            '// library. This leads to Python being unable to load baplus.'
            ' While I\'m sure\n'
            '// there is some way to get those symbols exported, I\'m worried'
            ' it might be\n'
            '// a messy platform-specific affair. So instead we\'re just'
            ' defining that\n'
            '// function here when baplus is present and forwarding it'
            ' through to the\n'
            '// static library version.\n'
            'extern "C" auto PyInit__baplus() -> PyObject* {\n'
            '  return DoPyInit__baplus();\n'
            '}\n'
        )
    else:
        init_plus_code = ''

    # NOTE(review): exact blank-line layout inside this template was
    # reconstructed from a collapsed docs extraction — confirm against
    # the checked-in generated header.
    base_code = """
        // Released under the MIT License. See LICENSE for details.

        #ifndef BALLISTICA_CORE_GENERATED_PYTHON_MODULES_MONOLITHIC_H_
        #define BALLISTICA_CORE_GENERATED_PYTHON_MODULES_MONOLITHIC_H_

        // THIS CODE IS AUTOGENERATED BY CODEGEN BUILD; DO NOT EDIT BY HAND.

        #include "ballistica/shared/ballistica.h"
        #include "ballistica/shared/python/python_macros.h"

        extern "C" {
        ${EXTERN_DEF_CODE}
        }

        namespace ballistica {

        /// Register init calls for all of our built-in Python modules.
        /// Should only be used in monolithic builds. In modular builds
        /// binary modules get located as .so files on disk as per regular
        /// Python behavior.
        void MonolithicRegisterPythonModules() {
          if (g_buildconfig.monolithic_build()) {
        ${PY_REGISTER_CODE}
          } else {
            FatalError(
                "MonolithicRegisterPythonModules should not be called"
                " in modular builds.");
          }
        }
        ${PY_INIT_PLUS}
        }  // namespace ballistica

        #endif  // BALLISTICA_CORE_GENERATED_PYTHON_MODULES_MONOLITHIC_H_
        """
    out = (
        textwrap.dedent(base_code)
        .replace('${EXTERN_DEF_CODE}', extern_def_code)
        .replace(
            '${PY_REGISTER_CODE}', textwrap.indent(py_register_code, '    ')
        )
        .replace('${PY_INIT_PLUS}', init_plus_code)
        .strip()
        + '\n'
    )
    os.makedirs(os.path.dirname(outpath), exist_ok=True)
    with open(outpath, 'w', encoding='utf-8') as outfile:
        outfile.write(out)
def py_examine() -> None:
    """Run a python examination at a given point in a given file."""
    import os
    from pathlib import Path

    import efrotools.emacs

    if len(sys.argv) != 7:
        print('ERROR: expected 7 args')
        sys.exit(255)
    filename = Path(sys.argv[2])
    line = int(sys.argv[3])
    column = int(sys.argv[4])
    selection: str | None = None if sys.argv[5] == '' else sys.argv[5]
    operation = sys.argv[6]

    # This stuff assumes it is being run from project root.
    os.chdir(pcommand.PROJROOT)

    # Set up pypaths so our main distro stuff works.
    scriptsdir = os.path.abspath(
        os.path.join(
            os.path.dirname(sys.argv[0]), '../src/assets/ba_data/python'
        )
    )
    toolsdir = os.path.abspath(
        os.path.join(os.path.dirname(sys.argv[0]), '../tools')
    )
    if scriptsdir not in sys.path:
        sys.path.append(scriptsdir)
    if toolsdir not in sys.path:
        sys.path.append(toolsdir)

    efrotools.emacs.py_examine(
        pcommand.PROJROOT, filename, line, column, selection, operation
    )
def clean_orphaned_assets() -> None:
    """Remove asset files that are no longer part of the build.

    Walks ``build/assets`` and unlinks any file whose
    build-assets-relative path is not listed in the combined
    public+private asset manifests, then prunes any directories left
    empty.
    """
    import os
    import json
    import subprocess

    # Operate from dist root..
    os.chdir(pcommand.PROJROOT)

    # Our manifest is split into 2 files (public and private).
    with open(
        'src/assets/.asset_manifest_public.json', encoding='utf-8'
    ) as infile:
        manifest = set(json.loads(infile.read()))
    with open(
        'src/assets/.asset_manifest_private.json', encoding='utf-8'
    ) as infile:
        manifest.update(set(json.loads(infile.read())))

    for root, _dirs, fnames in os.walk('build/assets'):
        for fname in fnames:
            fpath = os.path.join(root, fname)
            # Manifest entries are relative to build/assets; compute
            # the relative path explicitly rather than slicing off a
            # hard-coded prefix length (the old `fpath[13:]`).
            fpathrel = os.path.relpath(fpath, 'build/assets')
            if fpathrel not in manifest:
                print(f'Removing orphaned asset file: {fpath}')
                os.unlink(fpath)

    # Lastly, clear empty dirs.
    subprocess.run(
        'find build/assets -depth -empty -type d -delete',
        shell=True,
        check=True,
    )
def win_ci_install_prereqs() -> None:
    """Install bits needed for basic win ci."""
    import json

    from efrotools.efrocache import get_target

    # We'll need to pull a handful of things out of efrocache for the
    # build to succeed. Normally this would happen through our Makefile
    # targets but we can't use them under raw Windows so we need to just
    # hard-code whatever we need here.
    lib_dbg_x64 = 'build/prefab/lib/windows/Debug_x64'
    needed_targets: set[str] = {
        f'{lib_dbg_x64}/BallisticaKitGenericPlus.lib',
        f'{lib_dbg_x64}/BallisticaKitGenericPlus.pdb',
        'ballisticakit-windows/Generic/BallisticaKit.ico',
    }

    # Look through everything that gets generated by our codegen builds
    # and pick out anything we need for our basic builds/tests.
    with open(
        'src/codegen/.codegen_manifest_public.json', encoding='utf-8'
    ) as infile:
        codegen_public: list[str] = json.loads(infile.read())
    with open(
        'src/codegen/.codegen_manifest_private.json', encoding='utf-8'
    ) as infile:
        codegen_private: list[str] = json.loads(infile.read())
    for target in codegen_public + codegen_private:
        if (
            target.startswith('src/ballistica/') and '/generated/' in target
        ) or (
            target.startswith('src/assets/ba_data/python/')
            and '/_generated/' in target
        ):
            needed_targets.add(target)

    for target in needed_targets:
        get_target(target, batch=pcommand.is_batch(), clr=pcommand.clr())
def win_ci_binary_build() -> None:
    """Simple windows binary build for ci."""
    import subprocess

    # Do the thing.
    subprocess.run(
        [
            'C:\\Program Files\\Microsoft Visual Studio\\2022\\'
            'Enterprise\\MSBuild\\Current\\Bin\\MSBuild.exe',
            'ballisticakit-windows\\Generic\\BallisticaKitGeneric.vcxproj',
            '-target:Build',
            '-property:Configuration=Debug',
            '-property:Platform=x64',
            '-property:VisualStudioVersion=17',
        ],
        check=True,
    )
def update_cmake_prefab_lib() -> None:
    """Update prefab internal libs; run as part of a build.

    Expects 3 args after the command name: build type
    (``standard``/``server``), mode (``debug``/``release``), and the
    cmake build dir. Builds the prefab static lib via ``make`` and
    copies it into ``<builddir>/prefablib`` when the copy is missing or
    older than the freshly built target.
    """
    import subprocess
    import os

    from efro.error import CleanError

    from batools.build import PrefabPlatform

    if len(sys.argv) != 5:
        raise CleanError(
            'Expected 3 args (standard/server, debug/release, build-dir)'
        )
    buildtype = sys.argv[2]
    mode = sys.argv[3]
    builddir = sys.argv[4]
    if buildtype not in {'standard', 'server'}:
        raise CleanError(f'Invalid buildtype: {buildtype}')
    if mode not in {'debug', 'release'}:
        raise CleanError(f'Invalid mode: {mode}')

    # Our 'cmake' build targets use the Linux side of WSL; not native
    # Windows.
    platform = PrefabPlatform.get_current(wsl_targets_windows=False)

    suffix = '_server' if buildtype == 'server' else '_gui'
    target = (
        f'build/prefab/lib/{platform.value}{suffix}/{mode}/libballisticaplus.a'
    )

    # Build the target and then copy it to dst if it doesn't exist there
    # yet or the existing one is older than our target.
    subprocess.run(['make', target], check=True)
    libdir = os.path.join(builddir, 'prefablib')
    libpath = os.path.join(libdir, 'libballisticaplus.a')

    update = True
    time1 = os.path.getmtime(target)
    if os.path.exists(libpath):
        time2 = os.path.getmtime(libpath)
        if time1 <= time2:
            update = False

    if update:
        # exist_ok=True already tolerates a pre-existing dir; no
        # separate exists() check needed.
        os.makedirs(libdir, exist_ok=True)
        subprocess.run(['cp', target, libdir], check=True)
def android_archive_unstripped_libs() -> None:
    """Copy libs to a build archive."""
    import subprocess
    from pathlib import Path

    from efro.error import CleanError
    from efro.terminal import Clr

    if len(sys.argv) != 4:
        raise CleanError('Expected 2 args; src-dir and dst-dir')
    src = Path(sys.argv[2])
    dst = Path(sys.argv[3])

    # Start with a fresh dst dir each run.
    if dst.exists():
        subprocess.run(['rm', '-rf', dst], check=True)
    dst.mkdir(parents=True, exist_ok=True)
    if not src.is_dir():
        raise CleanError(f"Source dir not found: '{src}'")

    libname = 'libmain'
    libext = '.so'
    for abi, abishort in [
        ('armeabi-v7a', 'arm'),
        ('arm64-v8a', 'arm64'),
        ('x86', 'x86'),
        ('x86_64', 'x86-64'),
    ]:
        srcpath = Path(src, abi, libname + libext)
        dstname = f'{libname}_{abishort}{libext}'
        dstpath = Path(dst, dstname)
        if srcpath.exists():
            print(f'Archiving unstripped library: {Clr.BLD}{dstname}{Clr.RST}')
            subprocess.run(['cp', srcpath, dstpath], check=True)
            subprocess.run(
                ['tar', '-zcf', dstname + '.tgz', dstname], cwd=dst, check=True
            )
            subprocess.run(['rm', dstpath], check=True)
def spinoff_test() -> None:
    """Test spinoff functionality."""
    import batools.spinoff

    # Forward everything after the command name through.
    batools.spinoff.spinoff_test(sys.argv[2:])
def spinoff_check_submodule_parent() -> None:
    """Make sure this dst proj has a submodule parent."""
    import os

    from efro.error import CleanError

    # Make sure we're a spinoff dst project. The spinoff command will be
    # a symlink if this is the case.
    if not os.path.exists('tools/spinoff'):
        raise CleanError(
            'This does not appear to be a spinoff-enabled project.'
        )
    if not os.path.islink('tools/spinoff'):
        raise CleanError('This project is a spinoff parent; we require a dst.')

    if not os.path.isdir('submodules/ballistica'):
        raise CleanError(
            'This project is not using a submodule for its parent.\n'
            'To set one up, run `tools/spinoff add-submodule-parent`'
        )
def gen_python_init_module() -> None:
    """Generate a basic __init__.py."""
    import os

    from efro.error import CleanError
    from efro.terminal import Clr

    from batools.project import project_centric_path

    if len(sys.argv) != 3:
        raise CleanError('Expected an outfile arg.')
    outfilename = sys.argv[2]

    os.makedirs(os.path.dirname(outfilename), exist_ok=True)
    prettypath = project_centric_path(
        projroot=str(pcommand.PROJROOT), path=outfilename
    )
    print(f'Codegen-building {Clr.BLD}{prettypath}{Clr.RST}')
    with open(outfilename, 'w', encoding='utf-8') as outfile:
        outfile.write(
            '# Released under the MIT License.'
            ' See LICENSE for details.\n'
            '#\n'
        )
def tests_warm_start() -> None:
    """Warm-start some stuff needed by tests.

    This keeps logs clearer by showing any binary builds/downloads we
    need to do instead of having those silently happen as part of
    tests.
    """
    from batools import apprun

    # We do lots of apprun.python_command() within test. Pre-build the
    # binary that they need to do their thing.
    if not apprun.test_runs_disabled():
        apprun.acquire_binary(purpose='running tests')
def wsl_build_check_win_drive() -> None:
    """Make sure we're building on a windows drive."""
    import os
    import subprocess
    import textwrap

    from efro.error import CleanError
    from efrotools.util import (
        is_wsl_windows_build_path,
        wsl_windows_build_path_description,
    )

    # We use env vars to influence our behavior and thus can't support
    # batch.

    if (
        subprocess.run(
            ['which', 'wslpath'], check=False, capture_output=True
        ).returncode
        != 0
    ):
        raise CleanError(
            "'wslpath' not found. This does not seem to be a WSL environment."
        )

    if os.environ.get('WSL_BUILD_CHECK_WIN_DRIVE_IGNORE') == '1':
        return

    nativepath = os.getcwd()

    # Get a windows path to the current dir.
    winpath = (
        subprocess.run(
            ['wslpath', '-w', '-a', nativepath],
            capture_output=True,
            check=True,
        )
        .stdout.decode()
        .strip()
    )

    def _wrap(txt: str) -> str:
        return textwrap.fill(txt, 76)

    # If we're sitting under the linux filesystem, our path will start
    # with '\\wsl$' or '\\wsl.localhost' or '\\wsl\'; fail in that case
    # and explain why.
    if any(
        winpath.startswith(x) for x in ['\\\\wsl$', '\\\\wsl.', '\\\\wsl\\']
    ):
        raise CleanError(
            '\n\n'.join(
                [
                    _wrap(
                        'ERROR: This project appears to live'
                        ' on the Linux filesystem.'
                    ),
                    _wrap(
                        'Visual Studio compiles will error here'
                        ' for reasons related to Linux filesystem'
                        ' case-sensitivity, and thus are disallowed.'
                        ' Clone the repo to a location that maps to a native'
                        ' Windows drive such as \'/mnt/c/ballistica\''
                        ' and try again.'
                    ),
                    _wrap(
                        'Note that WSL2 filesystem performance'
                        ' is poor when accessing native Windows drives,'
                        ' so if Visual Studio builds are not needed it may'
                        ' be best to keep things here on the Linux'
                        ' filesystem. This behavior may differ under WSL1'
                        ' (untested).'
                    ),
                    _wrap(
                        'Set env-var WSL_BUILD_CHECK_WIN_DRIVE_IGNORE=1 to'
                        ' skip this check.'
                    ),
                ]
            )
        )

    # We also now require this check to be true. We key off this same
    # check in other places to introduce various workarounds to deal
    # with funky permissions issues/etc.
    #
    # Note that we could rely on *only* this check, but it might be nice
    # to leave the above one in as well to better explain the Linux
    # filesystem situation.
    if not is_wsl_windows_build_path(nativepath):
        reqs = wsl_windows_build_path_description()
        raise CleanError(
            '\n\n'.join(
                [
                    _wrap(
                        f'ERROR: This project\'s path ({nativepath})'
                        f' is not valid for WSL Windows builds.'
                        f' Path must be: {reqs}.'
                    )
                ]
            )
        )
def wsl_path_to_win() -> None:
    """Forward escape slashes in a provided win path arg."""
    import subprocess
    import logging
    import os

    from efro.error import CleanError

    try:
        create = False
        escape = False
        if len(sys.argv) < 3:
            raise CleanError('Expected at least 1 path arg.')
        wsl_path: str | None = None
        for arg in sys.argv[2:]:
            if arg == '--create':
                create = True
            elif arg == '--escape':
                escape = True
            else:
                if wsl_path is not None:
                    raise CleanError('More than one path provided.')
                wsl_path = arg
        if wsl_path is None:
            raise CleanError('No path provided.')

        # wslpath fails on nonexistent paths; make it clear when that
        # happens.
        if create:
            os.makedirs(wsl_path, exist_ok=True)
        if not os.path.exists(wsl_path):
            raise CleanError(f'Path \'{wsl_path}\' does not exist.')

        results = subprocess.run(
            ['wslpath', '-w', '-a', wsl_path],
            capture_output=True,
            check=True,
        )
    except Exception:
        # This gets used in a makefile so our returncode is ignored;
        # let's try to make our failure known in other ways.
        logging.exception('wsl_to_escaped_win_path failed.')
        print('wsl_to_escaped_win_path_error_occurred', end='')
        return

    out = results.stdout.decode().strip()

    # If our input ended with a slash, match in the output.
    if wsl_path.endswith('/') and not out.endswith('\\'):
        out += '\\'

    if escape:
        out = out.replace('\\', '\\\\')
    print(out, end='')
def get_modern_make() -> None:
    """Print name of a modern make command."""
    import platform
    import subprocess

    # Mac gnu make is outdated (due to newer versions using GPL3 I
    # believe). so let's return 'gmake' there which will point to
    # homebrew make which should be up to date.
    if platform.system() == 'Darwin':
        if (
            subprocess.run(
                ['which', 'gmake'], check=False, capture_output=True
            ).returncode
            != 0
        ):
            print(
                'WARNING: this requires gmake (mac system make is too old).'
                " Install it with 'brew install make'",
                file=sys.stderr,
                flush=True,
            )
        print('gmake')
    else:
        print('make')
def assetpins() -> None:
    """Inspect and update asset-package pins.

    Subcommands::

      assetpins                            # default: same as 'list'
      assetpins list                       # list pins + master's
                                           # latest-available
      assetpins help                       # show usage examples
      assetpins update <TARGET> <VERSION>  # mutate matched pins
      assetpins check                      # exit non-zero on any
                                           # dev/test pin

    VERSION can be:

    - ``latest`` — current track, newest version.
    - ``prod`` / ``test`` / ``dev`` — switch (or stay on) the named
      track, newest version of it.
    - A full version string as seen in the third segment of an apverid:
      ``<digits>`` for prod (e.g. ``260513a``), ``test<digits>`` for
      test (e.g. ``test260512a``), or ``dev<digits>`` for dev (e.g.
      ``dev260513a``). The track is inferred from the prefix; account
      and package come from each pin's own apverid.

    TARGET can be:

    - ``all`` — every discovered pin.
    - ``<package-name>`` (e.g. ``bastdassets``) — every pin of that
      asset-package across accounts.
    - A file path (e.g. ``pconfig/projectconfig.json``) — exactly one
      pin.

    Pins live in ``pconfig/projectconfig.json`` (the
    construct-mode/bootloader pin) and per-wrapper ``# ba_meta require
    asset-package <id>`` lines in Python wrapper modules under
    ``src/assets/ba_data/python/``.

    Each pin is independent — moving one pin does not move any other;
    track-switching is an explicit deliberate operation. ``assetpins``
    is the only command that mutates pin state, and the only build-flow
    phase that talks to the cloud — see
    ``efrohome/docs/global_design/build_system.md``.
    """
    from pathlib import Path

    from efro.error import CleanError
    from efro.terminal import Clr

    from batools import assetpins as assetpins_module

    args = sys.argv[2:]

    # Bare invocation is shorthand for 'list'.
    if not args or args == ['list']:
        assetpins_module.do_list(Path(pcommand.PROJROOT))
        return

    if args == ['help']:
        assetpins_module.do_help()
        return

    if args[0] == 'update':
        rest = args[1:]
        if len(rest) != 2:
            raise CleanError(
                f'assetpins update: expected exactly two args'
                f' (TARGET VERSION), got {len(rest)}.'
                f' Try {Clr.BLD}assetpins update all latest{Clr.RST}.'
            )
        target_str, version_str = rest
        assetpins_module.do_update(
            Path(pcommand.PROJROOT), target_str, version_str
        )
        return

    if args[0] == 'check':
        if len(args) > 1:
            raise CleanError('assetpins check: takes no args.')
        offenders = assetpins_module.do_check(Path(pcommand.PROJROOT))
        if not offenders:
            print(f'{Clr.GRN}assetpins check: clean.{Clr.RST}')
            return
        lines = [f'{Clr.RED}assetpins check: found non-prod pin(s):{Clr.RST}']
        for pin in offenders:
            lines.append(
                f'  {Clr.BLD}{pin.file_path}{Clr.RST}:'
                f' {Clr.CYN}{pin.apverid}{Clr.RST}'
                f' ({pin.pin_type.value})'
            )
        # Bug-fix: the hint previously said 'update prod <target>',
        # which swaps the documented/implemented arg order
        # (update <TARGET> <VERSION>).
        lines.append(
            f'{Clr.YLW}Fix: run `make assetpins-latest` to move to the'
            f' current-track latest, or `tools/pcommand assetpins update'
            f' <target> prod` to switch to prod.{Clr.RST}'
        )
        raise CleanError('\n'.join(lines))

    raise CleanError(
        f'Unknown assetpins subcommand: {args[0]!r}.'
        f' Try {Clr.BLD}assetpins{Clr.RST} (list),'
        f' {Clr.BLD}assetpins help{Clr.RST} (examples),'
        f' {Clr.BLD}assetpins update <TARGET> <VERSION>{Clr.RST}, or'
        f' {Clr.BLD}assetpins check{Clr.RST}.'
    )
def asset_bundle_build() -> None:
    """Build a bundled-asset variant for the projectconfig pin.

    Takes one arg: a variant name (``gui`` or ``headless``).

    - ``gui``: real renderable assets (fallback-profile regular-quality
      textures + English language + constant).
    - ``headless``: same bucket *shape* as gui, but with
      ``null``-profile textures (single shared empty blob) so headless
      server builds keep the same wrapper-module layout (and same
      type-checks) without shipping image data.

    Reads the apverid from ``pconfig/projectconfig.json``'s ``"assets"``
    field. The projectconfig value is expected to be a fully-resolved
    apverid (``<owner>.<name>.<seg>`` where ``<seg>`` is digits,
    ``devN``, or ``testN``). If it's the unresolved pseudo-id
    ``<owner>.<name>.dev``, the build refuses with a single-tunnel "run
    ``make assetpins-latest``" error. Pin-state mutation lives
    exclusively in ``tools/pcommand assetpins``.

    Writes ``.cache/asset_bundle/<variant>/manifest.json`` plus
    CAS-keyed bucket manifests + data blobs under ``.cache/assetdata/``.
    Both variants share the same ``.cache/assetdata/`` (CAS dedup).
    Staging picks the right variant to copy into ``<staged>/ba_data/``
    based on the build target.
    """
    import os
    import json
    import subprocess

    from efro.error import CleanError
    from efro.terminal import Clr
    from efrotools.project import getprojectconfig

    from batools.assetpins import is_unresolved_dev

    args = pcommand.get_args()
    if len(args) != 1:
        raise CleanError('Expected 1 arg (variant name).')
    variant = args[0]
    valid_variants = ('gui', 'headless')
    if variant not in valid_variants:
        raise CleanError(
            f"Invalid variant {variant!r};"
            f' expected one of {valid_variants}.'
        )

    apversion = getprojectconfig(pcommand.PROJROOT).get('assets')
    if not isinstance(apversion, str) or not apversion:
        raise CleanError(
            f"Need a string 'assets' value in projectconfig;"
            f' got {type(apversion).__name__} value {apversion!r}.'
        )

    # Bare ``<owner>.<name>.dev`` is a request for the latest dev
    # snapshot; it must be resolved before any build path consumes it.
    # ``assetpins upgrade`` owns the resolution + writeback; we just
    # refuse here with the single-tunnel fix-message.
    if is_unresolved_dev(apversion):
        raise CleanError(
            f"projectconfig 'assets' is the unresolved pseudo-id"
            f' {apversion!r}; run `make assetpins-latest` to'
            f' resolve it to a concrete devN snapshot first.'
        )

    # The gui variant ships real renderable textures; the headless
    # variant uses the special ``null`` profile so the bucket manifest
    # has the same logical paths but every entry points at a single
    # shared empty blob.
    texture_profile = 'fallback_v1' if variant == 'gui' else 'null'

    bundle_path = f'.cache/asset_bundle/{variant}/manifest.json'

    # Steady-state early-out: resolved apverid + existing manifest at
    # the same apverid → nothing to do. The pre-resolved-id invariant
    # means we can short-circuit unconditionally when the apverid
    # matches; no more "is this dev?" branching.
    if os.path.exists(bundle_path):
        try:
            with open(bundle_path, encoding='utf-8') as infile:
                existing = json.load(infile)
        except Exception:
            # Best-effort: an unreadable manifest just means no
            # early-out; we'll rebuild below.
            existing = None
        if isinstance(existing, dict):
            packages = existing.get('asset_packages') or []
            if (
                isinstance(packages, list)
                and len(packages) == 1
                and isinstance(packages[0], dict)
                and packages[0].get('apverid') == apversion
            ):
                return

    print(
        f'{Clr.BLU}Building {variant} asset bundle for'
        f' {apversion}...{Clr.RST}',
        flush=True,
    )
    try:
        subprocess.run(
            [
                f'{pcommand.PROJROOT}/tools/bacloud',
                'assetpackage',
                '_assemble',
                apversion,
                '--texture-profile',
                texture_profile,
                '--texture-quality',
                'regular',
                '--language',
                'eng',
                '--bundle-path',
                bundle_path,
            ],
            check=True,
        )
    except Exception as exc:
        raise CleanError(
            f'Failed to build {variant} asset bundle for'
            f' {apversion}. If the apverid no longer exists on'
            f' master (dev snapshots get pruned quickly), run'
            f' `make assetpins-latest` to re-resolve.'
        ) from exc

    bundle_manifest = os.path.join(pcommand.PROJROOT, bundle_path)
    if not os.path.exists(bundle_manifest):
        raise CleanError(
            f'Asset bundle build completed but {bundle_manifest}'
            f' is missing.'
        )
def cst_test() -> None:
    """Test filtering a Python file using LibCST."""
    from typing import override

    from efro.error import CleanError

    import libcst as cst
    from libcst import CSTTransformer, Name, Index, Subscript

    args = pcommand.get_args()
    if len(args) != 2:
        raise CleanError('Expected an in-path and out-path.')
    filename = args[0]
    filenameout = args[1]

    class RemoveAnnotatedTransformer(CSTTransformer):
        """Replaces `Annotated[FOO, ...]` with just `FOO`"""

        @override
        def leave_Subscript(
            self, original_node: BaseExpression, updated_node: BaseExpression
        ) -> BaseExpression:
            # Strip Annotated[X, ...] down to X; leave everything else
            # untouched.
            if (
                isinstance(updated_node, Subscript)
                and isinstance(updated_node.value, Name)
                and updated_node.value.value == 'Annotated'
                and isinstance(updated_node.slice[0].slice, Index)
            ):
                return updated_node.slice[0].slice.value
            return updated_node

    with open(filename, 'r', encoding='utf-8') as f:
        source_code: str = f.read()

    tree: cst.Module = cst.parse_module(source_code)
    modified_tree: cst.Module = tree.visit(RemoveAnnotatedTransformer())

    with open(filenameout, 'w', encoding='utf-8') as f:
        f.write(modified_tree.code)
    print('Success!')
# Docs-generation hack; import some stuff that we likely only forward-declared # in our actual source code so that docs tools can find it. from typing import (Coroutine, Any, Literal, Callable, Generator, Awaitable, Sequence, Self) import asyncio from concurrent.futures import Future from pathlib import Path from enum import Enum