SDL_image: Update build-scripts/build-release.py and Android aar package

From 7136334b8e7c1e71cee89f19c36829974c3f3ad0 Mon Sep 17 00:00:00 2001
From: Anonymous Maarten <[EMAIL REDACTED]>
Date: Tue, 21 Jan 2025 21:18:45 +0100
Subject: [PATCH] Update build-scripts/build-release.py and Android aar package

[ci skip]
---
 .github/workflows/release.yml                 |  10 +-
 build-scripts/build-release.py                | 214 ++++++++++++++----
 .../android/{INSTALL.md.in => README.md.in}   |  26 ++-
 .../android/{ => aar}/__main__.py.in          |   0
 .../{ => aar}/cmake/SDL3_imageConfig.cmake    |  22 +-
 .../cmake/SDL3_imageConfigVersion.cmake.in    |   0
 .../android/{ => aar}/description.json.in     |   0
 build-scripts/release-info.json               |  18 +-
 8 files changed, 215 insertions(+), 75 deletions(-)
 rename build-scripts/pkg-support/android/{INSTALL.md.in => README.md.in} (62%)
 rename build-scripts/pkg-support/android/{ => aar}/__main__.py.in (100%)
 rename build-scripts/pkg-support/android/{ => aar}/cmake/SDL3_imageConfig.cmake (83%)
 rename build-scripts/pkg-support/android/{ => aar}/cmake/SDL3_imageConfigVersion.cmake.in (100%)
 rename build-scripts/pkg-support/android/{ => aar}/description.json.in (100%)

diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 9fe2a472..82ea90dc 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -692,7 +692,9 @@ jobs:
       - name: 'Extract Android SDK from AAR'
         id: sdk
         run: |
-          python "${{ github.workspace }}/${{ needs.android.outputs.android-aar }}" -o /tmp/deps-android
+          unzip -o "${{ github.workspace }}/${{ needs.android.outputs.android-aar }}"
+          python "${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}.aar" -o /tmp/SDL3_image-android
+          echo "prefix=/tmp/SDL3_image-android" >>$GITHUB_OUTPUT
       - name: 'Download dependencies'
         id: deps
         env:
@@ -707,7 +709,9 @@ jobs:
       - name: 'Extract dependencies'
         id: deps-extract
         run: |
-          python "${{ steps.deps.outputs.dep-path }}/SDL3-${{ steps.deps.outputs.dep-sdl-version }}.aar" -o /tmp/deps-android
+          unzip -o "${{ steps.deps.outputs.dep-path }}/SDL3-devel-${{ steps.deps.outputs.dep-sdl-version }}-android.zip"
+          python "SDL3-${{ steps.deps.outputs.dep-sdl-version }}.aar" -o /tmp/SDL3-android
+          echo "sdl3-prefix=/tmp/SDL3-android" >>$GITHUB_OUTPUT
       - name: 'Install ninja'
         run: |
           sudo apt-get update -y
@@ -721,7 +725,7 @@ jobs:
               -GNinja \
               -DTEST_FULL=TRUE \
               -DTEST_STATIC=FALSE \
-              -DCMAKE_PREFIX_PATH="/tmp/deps-android" \
+              -DCMAKE_PREFIX_PATH="${{ steps.sdk.outputs.prefix }};${{ steps.deps-extract.outputs.sdl3-prefix }}" \
               -DCMAKE_TOOLCHAIN_FILE=${ANDROID_NDK_HOME}/build/cmake/android.toolchain.cmake \
               -DANDROID_ABI=${android_abi} \
               -DCMAKE_BUILD_TYPE=Release \
diff --git a/build-scripts/build-release.py b/build-scripts/build-release.py
index fe4db21f..f3faa06c 100755
--- a/build-scripts/build-release.py
+++ b/build-scripts/build-release.py
@@ -330,6 +330,10 @@ def configure_text(text: str, context: dict[str, str]) -> str:
     return text
 
 
+def configure_text_list(text_list: list[str], context: dict[str, str]) -> list[str]:
+    return [configure_text(text=e, context=context) for e in text_list]
+
+
 class ArchiveFileTree:
     def __init__(self):
         self._tree: dict[str, NodeInArchive] = {}
@@ -337,6 +341,12 @@ def __init__(self):
     def add_file(self, file: NodeInArchive):
         self._tree[file.arcpath] = file
 
+    def __iter__(self) -> typing.Iterable[NodeInArchive]:
+        yield from self._tree.values()
+
+    def __contains__(self, value: str) -> bool:
+        return value in self._tree
+
     def get_latest_mod_time(self) -> datetime.datetime:
         return max(item.time for item in self._tree.values() if item.time)
 
@@ -345,13 +355,12 @@ def add_to_archiver(self, archive_base: str, archiver: Archiver):
         added_files = dict()
 
         def calculate_symlink_target(s: NodeInArchive) -> str:
-            dest_dir = os.path.dirname(s.path)
+            dest_dir = os.path.dirname(s.arcpath)
             if dest_dir:
                 dest_dir += "/"
             target = dest_dir + s.symtarget
             while True:
                 new_target, n = re.subn(r"([^/]+/+[.]{2}/)", "", target)
-                print(f"{target=} {new_target=}")
                 target = new_target
                 if not n:
                     break
@@ -359,12 +368,15 @@ def calculate_symlink_target(s: NodeInArchive) -> str:
 
         # Add files in first pass
         for arcpath, node in self._tree.items():
+            assert node is not None, f"{arcpath} -> node"
             if node.data is not None:
                 archiver.add_file_data(arcpath=arc_join(archive_base, arcpath), data=node.data, time=node.time, mode=node.mode)
-                added_files[node.path] = node
+                assert node.arcpath is not None, f"{node=}"
+                added_files[node.arcpath] = node
             elif node.path is not None:
                 archiver.add_file_path(arcpath=arc_join(archive_base, arcpath), path=node.path)
-                added_files[node.path] = node
+                assert node.arcpath is not None, f"{node=}"
+                added_files[node.arcpath] = node
             elif node.symtarget is not None:
                 remaining_symlinks.add(node)
             elif node.directory:
@@ -372,6 +384,8 @@ def calculate_symlink_target(s: NodeInArchive) -> str:
             else:
                 raise ValueError(f"Invalid Archive Node: {repr(node)}")
 
+        assert None not in added_files
+
         # Resolve symlinks in second pass: zipfile does not support symlinks, so add files to zip archive
         while True:
             if not remaining_symlinks:
@@ -382,18 +396,18 @@ def calculate_symlink_target(s: NodeInArchive) -> str:
                 symlink_files_for_zip = {}
                 symlink_target_path = calculate_symlink_target(symlink)
                 if symlink_target_path in added_files:
-                    symlink_files_for_zip[symlink.path] = added_files[symlink_target_path]
+                    symlink_files_for_zip[symlink.arcpath] = added_files[symlink_target_path]
                 else:
                     symlink_target_path_slash = symlink_target_path + "/"
                     for added_file in added_files:
                         if added_file.startswith(symlink_target_path_slash):
-                            path_in_symlink = symlink.path + "/" + added_file.removeprefix(symlink_target_path_slash)
+                            path_in_symlink = symlink.arcpath + "/" + added_file.removeprefix(symlink_target_path_slash)
                             symlink_files_for_zip[path_in_symlink] = added_files[added_file]
                 if symlink_files_for_zip:
                     symlinks_this_time.add(symlink)
                     extra_added_files.update(symlink_files_for_zip)
                     files_for_zip = [{"arcpath": f"{archive_base}/{sym_path}", "data": sym_info.data, "mode": sym_info.mode} for sym_path, sym_info in symlink_files_for_zip.items()]
-                    archiver.add_symlink(arcpath=f"{archive_base}/{symlink.path}", target=symlink.symtarget, time=symlink.time, files_for_zip=files_for_zip)
+                    archiver.add_symlink(arcpath=f"{archive_base}/{symlink.arcpath}", target=symlink.symtarget, time=symlink.time, files_for_zip=files_for_zip)
             # if not symlinks_this_time:
             #     logger.info("files added: %r", set(path for path in added_files.keys()))
             assert symlinks_this_time, f"No targets found for symlinks: {remaining_symlinks}"
@@ -520,6 +534,15 @@ def _get_file_times(self, paths: tuple[str, ...]) -> dict[str, datetime.datetime
         return path_times
 
 
+class AndroidApiVersion:
+    def __init__(self, name: str, ints: tuple[int, ...]):
+        self.name = name
+        self.ints = ints
+
+    def __repr__(self) -> str:
+        return f"<{self.name} ({'.'.join(str(v) for v in self.ints)})>"
+
+
 class Releaser:
     def __init__(self, release_info: dict, commit: str, revision: str, root: Path, dist_path: Path, section_printer: SectionPrinter, executer: Executer, cmake_generator: str, deps_path: Path, overwrite: bool, github: bool, fast: bool):
         self.release_info = release_info
@@ -547,6 +570,7 @@ def get_context(self, extra_context: typing.Optional[dict[str, str]]=None) -> di
             "PROJECT_VERSION": self.version,
             "PROJECT_COMMIT": self.commit,
             "PROJECT_REVISION": self.revision,
+            "PROJECT_ROOT": str(self.root),
         }
         if extra_context:
             ctx.update(extra_context)
@@ -579,12 +603,22 @@ def _external_repo_path_filter(cls, path: str) -> bool:
     def create_source_archives(self) -> None:
         source_collector = SourceCollector(root=self.root, commit=self.commit, executer=self.executer, filter=self._path_filter)
         print(f"Collecting sources of {self.project}...")
-        archive_tree = source_collector.get_archive_file_tree()
+        archive_tree: ArchiveFileTree = source_collector.get_archive_file_tree()
         latest_mod_time = archive_tree.get_latest_mod_time()
         archive_tree.add_file(NodeInArchive.from_text(arcpath=REVISION_TXT, text=f"{self.revision}\n", time=latest_mod_time))
         archive_tree.add_file(NodeInArchive.from_text(arcpath=f"{GIT_HASH_FILENAME}", text=f"{self.commit}\n", time=latest_mod_time))
         archive_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["source"].get("files", {}), file_mapping_root=self.root, context=self.get_context(), time=latest_mod_time)
 
+        if "Makefile.am" in archive_tree:
+            patched_time = latest_mod_time + datetime.timedelta(minutes=1)
+            print(f"Makefile.am detected -> touching aclocal.m4, */Makefile.in, configure")
+            for node_data in archive_tree:
+                arc_name = os.path.basename(node_data.arcpath)
+                arc_name_we, arc_name_ext = os.path.splitext(arc_name)
+                if arc_name in ("aclocal.m4", "configure", "Makefile.in"):
+                    print(f"Bumping time of {node_data.arcpath}")
+                    node_data.time = patched_time
+
         archive_base = f"{self.project}-{self.version}"
         zip_path = self.dist_path / f"{archive_base}.zip"
         tgz_path = self.dist_path / f"{archive_base}.tar.gz"
@@ -752,24 +786,38 @@ def extract_filter(member: tarfile.TarInfo, path: str, /):
                 install_path = build_parent_dir / f"install-{triplet}"
                 shutil.rmtree(install_path, ignore_errors=True)
                 build_path.mkdir(parents=True, exist_ok=True)
+                context = self.get_context({
+                    "ARCH": arch,
+                    "DEP_PREFIX": str(mingw_deps_path / triplet),
+                })
+                extra_args = configure_text_list(text_list=self.release_info["mingw"]["autotools"]["args"], context=context)
+
                 with self.section_printer.group(f"Configuring MinGW {triplet} (autotools)"):
-                    extra_args = [arg.replace("@DEP_PREFIX@", str(mingw_deps_path / triplet)) for arg in self.release_info["mingw"]["autotools"]["args"]]
                     assert "@" not in " ".join(extra_args), f"@ should not be present in extra arguments ({extra_args})"
                     self.executer.run([
                         self.root / "configure",
                         f"--prefix={install_path}",
-                        f"--includedir={install_path}/include",
-                        f"--libdir={install_path}/lib",
-                        f"--bindir={install_path}/bin",
+                        f"--includedir=${{prefix}}/include",
+                        f"--libdir=${{prefix}}/lib",
+                        f"--bindir=${{prefix}}/bin",
                         f"--host={triplet}",
                         f"--build=x86_64-none-linux-gnu",
+                        "CFLAGS=-O2",
+                        "CXXFLAGS=-O2",
+                        "LDFLAGS=-Wl,-s",
                     ] + extra_args, cwd=build_path, env=new_env)
                 with self.section_printer.group(f"Build MinGW {triplet} (autotools)"):
                     self.executer.run(["make", f"-j{self.cpu_count}"], cwd=build_path, env=new_env)
                 with self.section_printer.group(f"Install MinGW {triplet} (autotools)"):
                     self.executer.run(["make", "install"], cwd=build_path, env=new_env)
                 self.verify_mingw_library(triplet=ARCH_TO_TRIPLET[arch], path=install_path / "bin" / f"{self.project}.dll")
-                archive_file_tree.add_directory_tree(arc_dir=arc_join(arc_root, triplet), path=install_path)
+                archive_file_tree.add_directory_tree(arc_dir=arc_join(arc_root, triplet), path=install_path, time=self.arc_time)
+
+                print("Recording arch-dependent extra files for MinGW development archive ...")
+                extra_context = {
+                    "TRIPLET": ARCH_TO_TRIPLET[arch],
+                }
+                archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["mingw"]["autotools"].get("files", {}), file_mapping_root=self.root, context=self.get_context(extra_context=extra_context), time=self.arc_time)
 
         if "cmake" in self.release_info["mingw"]:
             assert self.release_info["mingw"]["cmake"]["shared-static"] in ("args", "both")
@@ -782,6 +830,12 @@ def extract_filter(member: tarfile.TarInfo, path: str, /):
                 assert arch not in mingw_archs
                 mingw_archs.add(arch)
 
+                context = self.get_context({
+                    "ARCH": arch,
+                    "DEP_PREFIX": str(mingw_deps_path / triplet),
+                })
+                extra_args = configure_text_list(text_list=self.release_info["mingw"]["cmake"]["args"], context=context)
+
                 build_path = build_parent_dir / f"build-{triplet}"
                 install_path = build_parent_dir / f"install-{triplet}"
                 shutil.rmtree(install_path, ignore_errors=True)
@@ -792,7 +846,6 @@ def extract_filter(member: tarfile.TarInfo, path: str, /):
                     args_for_shared_static = (["-DBUILD_SHARED_LIBS=ON"], ["-DBUILD_SHARED_LIBS=OFF"])
                 for arg_for_shared_static in args_for_shared_static:
                     with self.section_printer.group(f"Configuring MinGW {triplet} (CMake)"):
-                        extra_args = [arg.replace("@DEP_PREFIX@", str(mingw_deps_path / triplet)) for arg in self.release_info["mingw"]["cmake"]["args"]]
                         assert "@" not in " ".join(extra_args), f"@ should not be present in extra arguments ({extra_args})"
                         self.executer.run([
                             f"cmake",
@@ -816,6 +869,13 @@ def extract_filter(member: tarfile.TarInfo, path: str, /):
                 self.verify_mingw_library(triplet=ARCH_TO_TRIPLET[arch], path=install_path / "bin" / f"{self.project}.dll")
                 archive_file_tree.add_directory_tree(arc_dir=arc_join(arc_root, triplet), path=install_path, time=self.arc_time)
 
+                print("Recording arch-dependent extra files for MinGW development archive ...")
+                extra_context = {
+                    "TRIPLET": ARCH_TO_TRIPLET[arch],
+                }
+                archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["mingw"]["cmake"].get("files", {}), file_mapping_root=self.root, context=self.get_context(extra_context=extra_context), time=self.arc_time)
+                print("... done")
+
         print("Recording extra files for MinGW development archive ...")
         archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["mingw"].get("files", {}), file_mapping_root=self.root, context=self.get_context(), time=self.arc_time)
         print("... done")
@@ -834,22 +894,25 @@ def extract_filter(member: tarfile.TarInfo, path: str, /):
         self.artifacts["mingw-devel-tar-gz"] = tgz_path
         self.artifacts["mingw-devel-tar-xz"] = txz_path
 
-    def _detect_android_api(self, android_home: str) -> typing.Optional[int]:
+    def _detect_android_api(self, android_home: str) -> typing.Optional[AndroidApiVersion]:
         platform_dirs = list(Path(p) for p in glob.glob(f"{android_home}/platforms/android-*"))
-        re_platform = re.compile("android-([0-9]+)")
-        platform_versions = []
+        re_platform = re.compile("^android-([0-9]+)(?:-ext([0-9]+))?$")
+        platform_versions: list[AndroidApiVersion] = []
         for platform_dir in platform_dirs:
             logger.debug("Found Android Platform SDK: %s", platform_dir)
+            if not (platform_dir / "android.jar").is_file():
+                logger.debug("Skipping SDK, missing android.jar")
+                continue
             if m:= re_platform.match(platform_dir.name):
-                platform_versions.append(int(m.group(1)))
-        platform_versions.sort()
+                platform_versions.append(AndroidApiVersion(name=platform_dir.name, ints=(int(m.group(1)), int(m.group(2) or 0))))
+        platform_versions.sort(key=lambda v: v.ints)
         logger.info("Available platform versions: %s", platform_versions)
-        platform_versions = list(filter(lambda v: v >= self._android_api_minimum, platform_versions))
-        logger.info("Valid platform versions (>=%d): %s", self._android_api_minimum, platform_versions)
+        platform_versions = list(filter(lambda v: v.ints >= self._android_api_minimum.ints, platform_versions))
+        logger.info("Valid platform versions (>=%s): %s", self._android_api_minimum.ints, platform_versions)
         if not platform_versions:
             return None
         android_api = platform_versions[0]
-        logger.info("Selected API version %d", android_api)
+        logger.info("Selected API version %s", android_api)
         return android_api
 
     def _get_prefab_json_text(self) -> str:
@@ -873,8 +936,19 @@ def _get_prefab_module_json_text(self, library_name: typing.Optional[str], expor
         return json.dumps(module_json_dict, indent=4)
 
     @property
-    def _android_api_minimum(self):
-        return self.release_info["android"]["api-minimum"]
+    def _android_api_minimum(self) -> AndroidApiVersion:
+        value = self.release_info["android"]["api-minimum"]
+        if isinstance(value, int):
+            ints = (value, )
+        elif isinstance(value, str):
+            ints = tuple(int(v) for v in value.split("."))
+        else:
+            raise ValueError("Invalid android.api-minimum: must be X or X.Y")
+        match len(ints):
+            case 1: name = f"android-{ints[0]}"
+            case 2: name = f"android-{ints[0]}-ext-{ints[1]}"
+            case _: raise ValueError("Invalid android.api-minimum: must be X or X.Y")
+        return AndroidApiVersion(name=name, ints=ints)
 
     @property
     def _android_api_target(self):
@@ -887,7 +961,7 @@ def _android_ndk_minimum(self):
     def _get_prefab_abi_json_text(self, abi: str, cpp: bool, shared: bool) -> str:
         abi_json_dict = {
             "abi": abi,
-            "api": self._android_api_minimum,
+            "api": self._android_api_minimum.ints[0],
             "ndk": self._android_ndk_minimum,
             "stl": "c++_shared" if cpp else "none",
             "static": not shared,
@@ -900,7 +974,7 @@ def _get_android_manifest_text(self) -> str:
                 xmlns:android="http://schemas.android.com/apk/res/android"
                 package="org.libsdl.android.{self.project}" android:versionCode="1"
                 android:versionName="1.0">
-                <uses-sdk android:minSdkVersion="{self._android_api_minimum}"
+                <uses-sdk android:minSdkVersion="{self._android_api_minimum.ints[0]}"
                           android:targetSdkVersion="{self._android_api_target}" />
             </manifest>
         """)
@@ -910,7 +984,8 @@ def create_android_archives(self, android_api: int, android_home: Path, android_
         if not cmake_toolchain_file.exists():
             logger.error("CMake toolchain file does not exist (%s)", cmake_toolchain_file)
             raise SystemExit(1)
-        aar_path =  self.dist_path / f"{self.project}-{self.version}.aar"
+        aar_path = self.root / "build-android" / f"{self.project}-{self.version}.aar"
+        android_dist_path = self.dist_path / f"{self.project}-devel-{self.version}-android.zip"
         android_abis = self.release_info["android"]["abis"]
         java_jars_added = False
         module_data_added = False
@@ -918,14 +993,24 @@ def create_android_archives(self, android_api: int, android_home: Path, android_
         shutil.rmtree(android_deps_path, ignore_errors=True)
 
         for dep, depinfo in self.release_info["android"].get("dependencies", {}).items():
-            android_aar = self.deps_path / glob.glob(depinfo["artifact"], root_dir=self.deps_path)[0]
-            with self.section_printer.group(f"Extracting Android dependency {dep} ({android_aar.name})"):
-                self.executer.run([sys.executable, str(android_aar), "-o", str(android_deps_path)])
+            dep_devel_zip = self.deps_path / glob.glob(depinfo["artifact"], root_dir=self.deps_path)[0]
+
+            dep_extract_path = self.deps_path / f"extract/android/{dep}"
+            shutil.rmtree(dep_extract_path, ignore_errors=True)
+            dep_extract_path.mkdir(parents=True, exist_ok=True)
+
+            with self.section_printer.group(f"Extracting Android dependency {dep} ({dep_devel_zip})"):
+                with zipfile.ZipFile(dep_devel_zip, "r") as zf:
+                    zf.extractall(dep_extract_path)
+
+                dep_devel_aar = dep_extract_path / glob.glob("*.aar", root_dir=dep_extract_path)[0]
+                self.executer.run([sys.executable, str(dep_devel_aar), "-o", str(android_deps_path)])
 
         for module_name, module_info in self.release_info["android"]["modules"].items():
             assert "type" in module_info and module_info["type"] in ("interface", "library"), f"module {module_name} must have a valid type"
 
-        archive_file_tree = ArchiveFileTree()
+        aar_file_tree = ArchiveFileTree()
+        android_devel_file_tree = ArchiveFileTree()
 
         for android_abi in android_abis:
             with self.section_printer.group(f"Building for Android {android_api} {android_abi}"):
@@ -976,20 +1061,20 @@ def create_android_archives(self, android_api: int, android_home: Path, android_
                         assert library.suffix in (".so", ".a")
                         assert library.is_file(), f"CMake should have built library '{library}' for module {module_name}"
                         arcdir_prefab_libs = f"{arcdir_prefab_module}/libs/android.{android_abi}"
-                        archive_file_tree.add_file(NodeInArchive.from_fs(arcpath=f"{arcdir_prefab_libs}/{library.name}", path=library, time=self.arc_time))
-                        archive_file_tree.add_file(NodeInArchive.from_text(arcpath=f"{arcdir_prefab_libs}/abi.json", text=self._get_prefab_abi_json_text(abi=android_abi, cpp=False, shared=library.suffix == ".so"), time=self.arc_time))
+                        aar_file_tree.add_file(NodeInArchive.from_fs(arcpath=f"{arcdir_prefab_libs}/{library.name}", path=library, time=self.arc_time))
+                        aar_file_tree.add_file(NodeInArchive.from_text(arcpath=f"{arcdir_prefab_libs}/abi.json", text=self._get_prefab_abi_json_text(abi=android_abi, cpp=False, shared=library.suffix == ".so"), time=self.arc_time))
 
                     if not module_data_added:
                         library_name = None
                         if module_info["type"] == "library":
                             library_name = Path(module_info["library"]).stem.removeprefix("lib")
                         export_libraries = module_info.get("export-libraries", [])
-                        archive_file_tree.add_file(NodeInArchive.from_text(arcpath=arc_join(arcdir_prefab_module, "module.json"), text=self._get_prefab_module_json_text(library_name=library_name, export_libraries=export_libraries), time=self.arc_time))
+                        aar_file_tree.add_file(NodeInArchive.from_text(arcpath=arc_join(arcdir_prefab_module, "module.json"), text=self._get_prefab_module_json_text(library_name=library_name, export_libraries=export_libraries), time=self.arc_time))
                         arcdir_prefab_include = f"prefab/modules/{module_name}/include"
                         if "includes" in module_info:
-                            archive_file_tree.add_file_mapping(arc_dir=arcdir_prefab_include, file_mapping=module_info["includes"], file_mapping_root=install_dir, context=self.get_context(), time=self.arc_time)
+                            aar_file_tree.add_file_mapping(arc_dir=arcdir_prefab_include, file_mapping=module_info["includes"], file_mapping_root=install_dir, context=self.get_context(), time=self.arc_time)
                         else:
-                            archive_file_tree.add_file(NodeInArchive.from_text(arcpath=arc_join(arcdir_prefab_include, ".keep"), text="\n", time=self.arc_time))
+                            aar_file_tree.add_file(NodeInArchive.from_text(arcpath=arc_join(arcdir_prefab_include, ".keep"), text="\n", time=self.arc_time))
                 module_data_added = True
 
                 if not java_jars_added:
@@ -1002,21 +1087,28 @@ def create_android_archives(self, android_api: int, android_home: Path, android_
                         assert sources_jar_path.is_file(), f"CMake should have archived the java sources into a JAR ({sources_jar_path})"
                         assert doc_jar_path.is_file(), f"CMake should have archived javadoc into a JAR ({doc_jar_path})"
 
-                        archive_file_tree.add_file(NodeInArchive.from_fs(arcpath="classes.jar", path=classes_jar_path, time=self.arc_time))
-                        archive_file_tree.add_file(NodeInArchive.from_fs(arcpath="classes-sources.jar", path=sources_jar_path, time=self.arc_time))
-                        archive_file_tree.add_file(NodeInArchive.from_fs(arcpath="classes-doc.jar", path=doc_jar_path, time=self.arc_time))
+                        aar_file_tree.add_file(NodeInArchive.from_fs(arcpath="classes.jar", path=classes_jar_path, time=self.arc_time))
+                        aar_file_tree.add_file(NodeInArchive.from_fs(arcpath="classes-sources.jar", path=sources_jar_path, time=self.arc_time))
+                        aar_file_tree.add_file(NodeInArchive.from_fs(arcpath="classes-doc.jar", path=doc_jar_path, time=self.arc_time))
 
         assert ("jars" in self.release_info["android"] and java_jars_added) or "jars" not in self.release_info["android"], "Must have archived java JAR archives"
 
-        archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["android"].get("files", {}), file_mapping_root=self.root, context=self.get_context(), time=self.arc_time)
+        aar_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["android"]["aar-files"], file_mapping_root=self.root, context=self.get_context(), time=self.arc_time)
 
-        archive_file_tree.add_file(NodeInArchive.from_text(arcpath="prefab/prefab.json", text=self._get_prefab_json_text(), time=self.arc_time))
-        archive_file_tree.add_file(NodeInArchive.from_text(arcpath="AndroidManifest.xml", text=self._get_android_manifest_text(), time=self.arc_time))
+        aar_file_tree.add_file(NodeInArchive.from_text(arcpath="prefab/prefab.json", text=self._get_prefab_json_text(), time=self.arc_time))
+        aar_file_tree.add_file(NodeInArchive.from_text(arcpath="AndroidManifest.xml", text=self._get_android_manifest_text(), time=self.arc_time))
 
         with Archiver(zip_path=aar_path) as archiver:
-            archive_file_tree.add_to_archiver(archive_base="", archiver=archiver)
+            aar_file_tree.add_to_archiver(archive_base="", archiver=archiver)
+            archiver.add_git_hash(arcdir="", commit=self.commit, time=self.arc_time)
+
+        android_devel_file_tree.add_file(NodeInArchive.from_fs(arcpath=aar_path.name, path=aar_path))
+        android_devel_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["android"]["files"], file_mapping_root=self.root, context=self.get_context(), time=self.arc_time)
+        with Archiver(zip_path=android_dist_path) as archiver:
+            android_devel_file_tree.add_to_archiver(archive_base="", archiver=archiver)
             archiver.add_git_hash(arcdir="", commit=self.commit, time=self.arc_time)
-        self.artifacts[f"android-aar"] = aar_path
+
+        self.artifacts[f"android-aar"] = android_dist_path
 
     def download_dependencies(self):
         shutil.rmtree(self.deps_path, ignore_errors=True)
@@ -1053,7 +1145,7 @@ def verify_dependencies(self):
                 assert len(msvc_matches) == 1, f"Exactly one archive matches msvc {dep} dependency: {msvc_matches}"
             if "android" in self.release_info:
                 android_matches = glob.glob(self.release_info["android"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)
-                assert len(android_matches) == 1, f"Exactly one archive matches msvc {dep} dependency: {msvc_matches}"
+                assert len(android_matches) == 1, f"Exactly one archive matches android {dep} dependency: {android_matches}"
 
     @staticmethod
     def _arch_to_vs_platform(arch: str, configuration: str="Release") -> VsArchPlatformConfig:
@@ -1179,6 +1271,10 @@ def _build_msvc_cmake(self, arch_platform: VsArchPlatformConfig, dep_roots: list
         platform_context = self.get_context(extra_context=arch_platform.extra_context())
 
         build_type = "Release"
+        extra_context = {
+            "ARCH": arch_platform.arch,
+            "PLATFORM": arch_platform.platform,
+        }
 
         built_paths = set(install_path / configure_text(f, context=platform_context) for file_mapping in (self.release_info["msvc"]["cmake"]["files-lib"], self.release_info["msvc"]["cmake"]["files-devel"]) for files_list in file_mapping.values() for f in files_list)
         logger.info("CMake builds these files, to be included in the package: %s", built_paths)
@@ -1229,7 +1325,7 @@ def _build_msvc_cmake(self, arch_platform: VsArchPlatformConfig, dep_roots: list
         logger.info("Collecting files...")
         archive_file_tree = ArchiveFileTree()
         archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["cmake"]["files-lib"], file_mapping_root=install_path, context=platform_context, time=self.arc_time)
-        archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["files-lib"], file_mapping_root=self.root, context=self.get_context(), time=self.arc_time)
+        archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["files-lib"], file_mapping_root=self.root, context=self.get_context(extra_context=extra_context), time=self.arc_time)
 
         logger.info("Creating %s", zip_path)
         with Archiver(zip_path=zip_path) as archiver:
@@ -1244,6 +1340,10 @@ def _build_msvc_devel(self) -> None:
         zip_path = self.dist_path / f"{self.project}-devel-{self.version}-VC.zip"
         arc_root = f"{self.project}-{self.version}"
 
+        def copy_files_devel(ctx):
+            archive_

(Patch may be truncated, please check the link at the top of this post.)
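
For reference, a minimal sketch of how the changed Android artifact layout is consumed, assuming hypothetical file names and version number: with this patch the `android-aar` artifact becomes a `-devel-<version>-android.zip` that wraps the self-extracting `.aar`, so it is unpacked in two steps, mirroring the release.yml hunk near the top of the patch.

```python
# Sketch only (not part of the patch); file names and version are placeholders.
import subprocess
import sys
import zipfile
from pathlib import Path

devel_zip = Path("SDL3_image-devel-3.2.0-android.zip")  # hypothetical artifact name
extract_dir = Path("extract-android")
extract_dir.mkdir(parents=True, exist_ok=True)

# Step 1: the release artifact is now a -devel-*-android.zip wrapping the .aar.
with zipfile.ZipFile(devel_zip) as zf:
    zf.extractall(extract_dir)

# Step 2: the .aar is still a self-extracting Python zipapp (see __main__.py.in),
# so running it with -o unpacks a prefix suitable for CMAKE_PREFIX_PATH.
aar = next(extract_dir.glob("*.aar"))
subprocess.run([sys.executable, str(aar), "-o", "/tmp/SDL3_image-android"], check=True)
```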