From 48aa1cccf70e90b028ae2eb775c6f4d9379dfa46 Mon Sep 17 00:00:00 2001
From: Anonymous Maarten <[EMAIL REDACTED]>
Date: Fri, 1 Nov 2024 23:08:36 +0100
Subject: [PATCH] Port build-script from SDL3
[ci skip]
---
.gitignore | 2 +-
CMakeLists.txt | 4 +-
build-scripts/build-release.py | 1059 ++++++++++++++++++++++---------
build-scripts/create-release.py | 11 +-
build-scripts/release-info.json | 104 ++-
build-scripts/showrev.sh | 4 +-
build-scripts/updaterev.sh | 2 +-
7 files changed, 821 insertions(+), 365 deletions(-)
diff --git a/.gitignore b/.gitignore
index 0af2680e36b56..f53f91c3b9231 100644
--- a/.gitignore
+++ b/.gitignore
@@ -15,7 +15,7 @@ build
gen
Build
buildbot
-/VERSION.txt
+/REVISION.txt
dist
*.so
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 0a9269a7e1399..10af989e6431b 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -3081,8 +3081,8 @@ endif()
# Compat helpers for the configuration files
-if(EXISTS "${PROJECT_SOURCE_DIR}/VERSION.txt")
- file(READ "${PROJECT_SOURCE_DIR}/VERSION.txt" SDL_SOURCE_VERSION)
+if(EXISTS "${PROJECT_SOURCE_DIR}/REVISION.txt")
+ file(READ "${PROJECT_SOURCE_DIR}/REVISION.txt" SDL_SOURCE_VERSION)
string(STRIP "${SDL_SOURCE_VERSION}" SDL_SOURCE_VERSION)
endif()
diff --git a/build-scripts/build-release.py b/build-scripts/build-release.py
index 0625ad6e1a6d7..5f3f7fa0d4fba 100755
--- a/build-scripts/build-release.py
+++ b/build-scripts/build-release.py
@@ -1,13 +1,14 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
"""
-This script is shared between SDL2, SDL2_image, SDL2_mixer and SDL2_ttf.
+This script is shared between SDL2, SDL3, and all satellite libraries.
Don't specialize this script for doing project-specific modifications.
Rather, modify release-info.json.
"""
import argparse
import collections
+import dataclasses
from collections.abc import Callable
import contextlib
import datetime
@@ -21,6 +22,7 @@
from pathlib import Path
import platform
import re
+import shlex
import shutil
import subprocess
import sys
@@ -30,10 +32,10 @@
import typing
import zipfile
-logger = logging.getLogger(__name__)
-
+logger = logging.getLogger(__name__)
GIT_HASH_FILENAME = ".git-hash"
+REVISION_TXT = "REVISION.txt"
def safe_isotime_to_datetime(str_isotime: str) -> datetime.datetime:
@@ -52,14 +54,23 @@ def safe_isotime_to_datetime(str_isotime: str) -> datetime.datetime:
raise ValueError(f"Invalid isotime: {str_isotime}")
-class VsArchPlatformConfig:
- def __init__(self, arch: str, platform: str, configuration: str):
- self.arch = arch
- self.platform = platform
- self.configuration = configuration
+def arc_join(*parts: str) -> str:
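+ # Join the non-empty components with "/"; no component may begin or end with "/".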
+ assert all(p[:1] != "/" and p[-1:] != "/" for p in parts), f"None of {parts} may start or end with '/'"
+ return "/".join(p for p in parts if p)
+
- def configure(self, s: str) -> str:
- return s.replace("@ARCH@", self.arch).replace("@PLATFORM@", self.platform).replace("@CONFIGURATION@", self.configuration)
+@dataclasses.dataclass(frozen=True)
+class VsArchPlatformConfig:
+ arch: str
+ configuration: str
+ platform: str
+
+ def extra_context(self):
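+ # Per-build values for the @<@ARCH@>@, @<@CONFIGURATION@>@ and @<@PLATFORM@>@ template placeholders.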
+ return {
+ "ARCH": self.arch,
+ "CONFIGURATION": self.configuration,
+ "PLATFORM": self.platform,
+ }
@contextlib.contextmanager
@@ -81,7 +92,7 @@ def run(self, cmd, cwd=None, env=None):
logger.info("Executing args=%r", cmd)
sys.stdout.flush()
if not self.dry:
- subprocess.run(cmd, check=True, cwd=cwd or self.root, env=env, text=True)
+ subprocess.check_call(cmd, cwd=cwd or self.root, env=env, text=True)
def check_output(self, cmd, cwd=None, dry_out=None, env=None, text=True):
logger.info("Executing args=%r", cmd)
@@ -211,6 +222,7 @@ def add_file_data(self, arcpath: str, data: bytes, mode: int, time: datetime.dat
self._added_files.add(arcpath)
def add_symlink(self, arcpath: str, target: str, time: datetime.datetime, files_for_zip):
+ logger.debug("Adding symlink (target=%r) -> %s", target, arcpath)
for zf in self._zip_files:
file_data_time = (time.year, time.month, time.day, time.hour, time.minute, time.second)
for f in files_for_zip:
@@ -228,17 +240,14 @@ def add_symlink(self, arcpath: str, target: str, time: datetime.datetime, files_
self._added_files.update(f["arcpath"] for f in files_for_zip)
- def add_git_hash(self, commit: str, arcdir: typing.Optional[str]=None, time: typing.Optional[datetime.datetime]=None):
- arcpath = GIT_HASH_FILENAME
- if arcdir and arcdir[-1:] != "/":
- arcpath = f"{arcdir}/{arcpath}"
- if not time:
- time = datetime.datetime(year=2024, month=4, day=1)
+ def add_git_hash(self, arcdir: str, commit: str, time: datetime.datetime):
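+ # Store the commit hash as <arcdir>/.git-hash in every archive format.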
+ arcpath = arc_join(arcdir, GIT_HASH_FILENAME)
data = f"{commit}\n".encode()
self.add_file_data(arcpath=arcpath, data=data, mode=0o100644, time=time)
def add_file_path(self, arcpath: str, path: Path):
assert path.is_file(), f"{path} should be a file"
+ logger.debug("Adding %s -> %s", path, arcpath)
for zf in self._zip_files:
zf.write(path, arcname=arcpath)
for tf in self._tar_files:
@@ -268,22 +277,192 @@ def __exit__(self, type, value, traceback):
self.close()
+class NodeInArchive:
+ def __init__(self, arcpath: str, path: typing.Optional[Path]=None, data: typing.Optional[bytes]=None, mode: typing.Optional[int]=None, symtarget: typing.Optional[str]=None, time: typing.Optional[datetime.datetime]=None, directory: bool=False):
+ self.arcpath = arcpath
+ self.path = path
+ self.data = data
+ self.mode = mode
+ self.symtarget = symtarget
+ self.time = time
+ self.directory = directory
+
+ @classmethod
+ def from_fs(cls, arcpath: str, path: Path, mode: int=0o100644, time: typing.Optional[datetime.datetime]=None) -> "NodeInArchive":
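+ # Fall back to the file's modification time when the caller supplies no timestamp.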
+ if time is None:
+ time = datetime.datetime.fromtimestamp(os.stat(path).st_mtime)
+ return cls(arcpath=arcpath, path=path, mode=mode, time=time)
+
+ @classmethod
+ def from_data(cls, arcpath: str, data: bytes, time: datetime.datetime) -> "NodeInArchive":
+ return cls(arcpath=arcpath, data=data, time=time, mode=0o100644)
+
+ @classmethod
+ def from_text(cls, arcpath: str, text: str, time: datetime.datetime) -> "NodeInArchive":
+ return cls.from_data(arcpath=arcpath, data=text.encode(), time=time)
+
+ @classmethod
+ def from_symlink(cls, arcpath: str, symtarget: str) -> "NodeInArchive":
+ return cls(arcpath=arcpath, symtarget=symtarget)
+
+ @classmethod
+ def from_directory(cls, arcpath: str) -> "NodeInArchive":
+ return cls(arcpath=arcpath, directory=True)
+
+ def __repr__(self) -> str:
+ return f"<{type(self).__name__}:arcpath={self.arcpath},path='{str(self.path)}',len(data)={len(self.data) if self.data else 'n/a'},directory={self.directory},symtarget={self.symtarget}>"
+
+
+def configure_file(path: Path, context: dict[str, str]) -> bytes:
+ text = path.read_text()
+ return configure_text(text, context=context).encode()
+
+
+def configure_text(text: str, context: dict[str, str]) -> str:
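+ # Replace @<@KEY@>@ markers with their context values; any marker left over raises ValueError below.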
+ original_text = text
+ for txt, repl in context.items():
+ text = text.replace(f"@<@{txt}@>@", repl)
+ success = all(thing not in text for thing in ("@<@", "@>@"))
+ if not success:
+ raise ValueError(f"Failed to configure {repr(original_text)}")
+ return text
+
+
+class ArchiveFileTree:
+ def __init__(self):
+ self._tree: dict[str, NodeInArchive] = {}
+
+ def add_file(self, file: NodeInArchive):
+ self._tree[file.arcpath] = file
+
+ def get_latest_mod_time(self) -> datetime.datetime:
+ return max(item.time for item in self._tree.values() if item.time)
+
+ def add_to_archiver(self, archive_base: str, archiver: Archiver):
+ remaining_symlinks = set()
+ added_files = dict()
+
+ def calculate_symlink_target(s: NodeInArchive) -> str:
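+ # Rebase the target onto the symlink's directory and collapse "x/../" segments, e.g. "lib/x/../y" -> "lib/y".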
+ dest_dir = os.path.dirname(s.arcpath)
+ if dest_dir:
+ dest_dir += "/"
+ target = dest_dir + s.symtarget
+ while True:
+ new_target, n = re.subn(r"([^/]+/+[.]{2}/)", "", target)
+ print(f"{target=} {new_target=}")
+ target = new_target
+ if not n:
+ break
+ return target
+
+ # Add files in first pass
+ for arcpath, node in self._tree.items():
+ if node.data is not None:
+ archiver.add_file_data(arcpath=arc_join(archive_base, arcpath), data=node.data, time=node.time, mode=node.mode)
+ assert node.arcpath is not None, f"{node=} has arcpath=None"
+ added_files[node.arcpath] = node
+ elif node.path is not None:
+ archiver.add_file_path(arcpath=arc_join(archive_base, arcpath), path=node.path)
+ assert node.arcpath is not None, f"{node=} has arcpath=None"
+ added_files[node.arcpath] = node
+ elif node.symtarget is not None:
+ remaining_symlinks.add(node)
+ elif node.directory:
+ pass
+ else:
+ raise ValueError(f"Invalid Archive Node: {repr(node)}")
+
+ # Resolve symlinks in second pass: zipfile does not support symlinks, so add files to zip archive
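+ # Each pass materializes the symlinks whose targets are already in the archive; a pass that resolves nothing means a symlink points at a missing target (asserted below).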
+ while True:
+ if not remaining_symlinks:
+ break
+ symlinks_this_time = set()
+ extra_added_files = {}
+ for symlink in remaining_symlinks:
+ symlink_files_for_zip = {}
+ symlink_target_path = calculate_symlink_target(symlink)
+ if symlink_target_path in added_files:
+ symlink_files_for_zip[symlink.arcpath] = added_files[symlink_target_path]
+ else:
+ symlink_target_path_slash = symlink_target_path + "/"
+ for added_file in added_files:
+ if added_file.startswith(symlink_target_path_slash):
+ path_in_symlink = symlink.arcpath + "/" + added_file.removeprefix(symlink_target_path_slash)
+ symlink_files_for_zip[path_in_symlink] = added_files[added_file]
+ if symlink_files_for_zip:
+ symlinks_this_time.add(symlink)
+ extra_added_files.update(symlink_files_for_zip)
+ files_for_zip = [{"arcpath": f"{archive_base}/{sym_path}", "data": sym_info.data, "mode": sym_info.mode} for sym_path, sym_info in symlink_files_for_zip.items()]
+ archiver.add_symlink(arcpath=f"{archive_base}/{symlink.arcpath}", target=symlink.symtarget, time=symlink.time, files_for_zip=files_for_zip)
+ # if not symlinks_this_time:
+ # logger.info("files added: %r", set(path for path in added_files.keys()))
+ assert symlinks_this_time, f"No targets found for symlinks: {remaining_symlinks}"
+ remaining_symlinks.difference_update(symlinks_this_time)
+ added_files.update(extra_added_files)
+
+ def add_directory_tree(self, arc_dir: str, path: Path, time: datetime.datetime):
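+ # Recursively add every file below path, preserving the on-disk layout under arc_dir.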
+ assert path.is_dir()
+ for files_dir, _, filenames in os.walk(path):
+ files_dir_path = Path(files_dir)
+ rel_files_path = files_dir_path.relative_to(path)
+ for filename in filenames:
+ self.add_file(NodeInArchive.from_fs(arcpath=arc_join(arc_dir, str(rel_files_path), filename), path=files_dir_path / filename, time=time))
+
+ def _add_files_recursively(self, arc_dir: str, paths: list[Path], time: datetime.datetime):
+ logger.debug(f"_add_files_recursively({arc_dir=} {paths=})")
+ for path in paths:
+ arcpath = arc_join(arc_dir, path.name)
+ if path.is_file():
+ logger.debug("Adding %s as %s", path, arcpath)
+ self.add_file(NodeInArchive.from_fs(arcpath=arcpath, path=path, time=time))
+ elif path.is_dir():
+ self._add_files_recursively(arc_dir=arc_join(arc_dir, path.name), paths=list(path.iterdir()), time=time)
+ else:
+ raise ValueError(f"Unsupported file type to add recursively: {path}")
+
+ def add_file_mapping(self, arc_dir: str, file_mapping: dict[str, list[str]], file_mapping_root: Path, context: dict[str, str], time: datetime.datetime):
+ for meta_rel_destdir, meta_file_globs in file_mapping.items():
+ rel_destdir = configure_text(meta_rel_destdir, context=context)
+ assert "@" not in rel_destdir, f"archive destination should not contain an @ after configuration ({repr(meta_rel_destdir)}->{repr(rel_destdir)})"
+ for meta_file_glob in meta_file_globs:
+ file_glob = configure_text(meta_file_glob, context=context)
+ assert "@" not in rel_destdir, f"archive glob should not contain an @ after configuration ({repr(meta_file_glob)}->{repr(file_glob)})"
+ if ":" in file_glob:
+ original_path, new_filename = file_glob.rsplit(":", 1)
+ assert ":" not in original_path, f"Too many ':' in {repr(file_glob)}"
+ assert "/" not in new_filename, f"New filename cannot contain a '/' in {repr(file_glob)}"
+ path = file_mapping_root / original_path
+ arcpath = arc_join(arc_dir, rel_destdir, new_filename)
+ if path.suffix == ".in":
+ data = configure_file(path, context=context)
+ logger.debug("Adding processed %s -> %s", path, arcpath)
+ self.add_file(NodeInArchive.from_data(arcpath=arcpath, data=data, time=time))
+ else:
+ logger.debug("Adding %s -> %s", path, arcpath)
+ self.add_file(NodeInArchive.from_fs(arcpath=arcpath, path=path, time=time))
+ else:
+ relative_file_paths = glob.glob(file_glob, root_dir=file_mapping_root)
+ assert relative_file_paths, f"Glob '{file_glob}' does not match any file"
+ self._add_files_recursively(arc_dir=arc_join(arc_dir, rel_destdir), paths=[file_mapping_root / p for p in relative_file_paths], time=time)
+
+
class SourceCollector:
- TreeItem = collections.namedtuple("TreeItem", ("path", "mode", "data", "symtarget", "directory", "time"))
+ # TreeItem = collections.namedtuple("TreeItem", ("path", "mode", "data", "symtarget", "directory", "time"))
def __init__(self, root: Path, commit: str, filter: typing.Optional[Callable[[str], bool]], executer: Executer):
self.root = root
self.commit = commit
self.filter = filter
self.executer = executer
- self._git_contents: typing.Optional[dict[str, SourceCollector.TreeItem]] = None
- def _get_git_contents(self) -> dict[str, TreeItem]:
- contents_tgz = subprocess.check_output(["git", "archive", "--format=tar.gz", self.commit, "-o", "/dev/stdout"], cwd=self.root, text=False)
+ def get_archive_file_tree(self) -> ArchiveFileTree:
+ git_archive_args = ["git", "archive", "--format=tar.gz", self.commit, "-o", "/dev/stdout"]
+ logger.info("Executing args=%r", git_archive_args)
+ contents_tgz = subprocess.check_output(git_archive_args, cwd=self.root, text=False)
tar_archive = tarfile.open(fileobj=io.BytesIO(contents_tgz), mode="r:gz")
filenames = tuple(m.name for m in tar_archive if (m.isfile() or m.issym()))
file_times = self._get_file_times(paths=filenames)
- git_contents = {}
+ git_contents = ArchiveFileTree()
for ti in tar_archive:
if self.filter and not self.filter(ti.name):
continue
@@ -302,15 +481,10 @@ def _get_git_contents(self) -> dict[str, TreeItem]:
directory = True
else:
raise ValueError(f"{ti.name}: unknown type")
- git_contents[ti.name] = self.TreeItem(path=ti.name, mode=ti.mode, data=data, symtarget=symtarget, directory=directory, time=file_time)
+ node = NodeInArchive(arcpath=ti.name, data=data, mode=ti.mode, symtarget=symtarget, time=file_time, directory=directory)
+ git_contents.add_file(node)
return git_contents
- @property
- def git_contents(self) -> dict[str, TreeItem]:
- if self._git_contents is None:
- self._git_contents = self._get_git_contents()
- return self._git_contents
-
def _get_file_times(self, paths: tuple[str, ...]) -> dict[str, datetime.datetime]:
dry_out = textwrap.dedent("""\
time=2024-03-14T15:40:25-07:00
@@ -345,67 +519,15 @@ def _get_file_times(self, paths: tuple[str, ...]) -> dict[str, datetime.datetime
return path_times
- def add_to_archiver(self, archive_base: str, archiver: Archiver):
- remaining_symlinks = set()
- added_files = dict()
-
- def calculate_symlink_target(s: SourceCollector.TreeItem) -> str:
- dest_dir = os.path.dirname(s.path)
- if dest_dir:
- dest_dir += "/"
- target = dest_dir + s.symtarget
- while True:
- new_target, n = re.subn(r"([^/]+/+[.]{2}/)", "", target)
- print(f"{target=} {new_target=}")
- target = new_target
- if not n:
- break
- return target
-
- # Add files in first pass
- for git_file in self.git_contents.values():
- if git_file.data is not None:
- archiver.add_file_data(arcpath=f"{archive_base}/{git_file.path}", data=git_file.data, time=git_file.time, mode=git_file.mode)
- added_files[git_file.path] = git_file
- elif git_file.symtarget is not None:
- remaining_symlinks.add(git_file)
-
- # Resolve symlinks in second pass: zipfile does not support symlinks, so add files to zip archive
- while True:
- if not remaining_symlinks:
- break
- symlinks_this_time = set()
- extra_added_files = {}
- for symlink in remaining_symlinks:
- symlink_files_for_zip = {}
- symlink_target_path = calculate_symlink_target(symlink)
- if symlink_target_path in added_files:
- symlink_files_for_zip[symlink.path] = added_files[symlink_target_path]
- else:
- symlink_target_path_slash = symlink_target_path + "/"
- for added_file in added_files:
- if added_file.startswith(symlink_target_path_slash):
- path_in_symlink = symlink.path + "/" + added_file.removeprefix(symlink_target_path_slash)
- symlink_files_for_zip[path_in_symlink] = added_files[added_file]
- if symlink_files_for_zip:
- symlinks_this_time.add(symlink)
- extra_added_files.update(symlink_files_for_zip)
- files_for_zip = [{"arcpath": f"{archive_base}/{sym_path}", "data": sym_info.data, "mode": sym_info.mode} for sym_path, sym_info in symlink_files_for_zip.items()]
- archiver.add_symlink(arcpath=f"{archive_base}/{symlink.path}", target=symlink.symtarget, time=symlink.time, files_for_zip=files_for_zip)
- # if not symlinks_this_time:
- # logger.info("files added: %r", set(path for path in added_files.keys()))
- assert symlinks_this_time, f"No targets found for symlinks: {remaining_symlinks}"
- remaining_symlinks.difference_update(symlinks_this_time)
- added_files.update(extra_added_files)
-
class Releaser:
- def __init__(self, release_info: dict, commit: str, root: Path, dist_path: Path, section_printer: SectionPrinter, executer: Executer, cmake_generator: str, deps_path: Path, overwrite: bool, github: bool, fast: bool):
+ def __init__(self, release_info: dict, commit: str, revision: str, root: Path, dist_path: Path, section_printer: SectionPrinter, executer: Executer, cmake_generator: str, deps_path: Path, overwrite: bool, github: bool, fast: bool):
self.release_info = release_info
self.project = release_info["name"]
self.version = self.extract_sdl_version(root=root, release_info=release_info)
self.root = root
self.commit = commit
+ self.revision = revision
self.dist_path = dist_path
self.section_printer = section_printer
self.executer = executer
@@ -415,9 +537,21 @@ def __init__(self, release_info: dict, commit: str, root: Path, dist_path: Path,
self.overwrite = overwrite
self.github = github
self.fast = fast
+ self.arc_time = datetime.datetime.now()
self.artifacts: dict[str, Path] = {}
+ def get_context(self, extra_context: typing.Optional[dict[str, str]]=None) -> dict[str, str]:
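+ # Base template context for configure_text()/configure_file(); extra_context may add or override keys.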
+ ctx = {
+ "PROJECT_NAME": self.project,
+ "PROJECT_VERSION": self.version,
+ "PROJECT_COMMIT": self.commit,
+ "PROJECT_REVISION": self.revision,
+ }
+ if extra_context:
+ ctx.update(extra_context)
+ return ctx
+
@property
def dry(self) -> bool:
return self.executer.dry
@@ -443,12 +577,15 @@ def _external_repo_path_filter(cls, path: str) -> bool:
return True
def create_source_archives(self) -> None:
- archive_base = f"{self.project}-{self.version}"
-
- project_souce_collector = SourceCollector(root=self.root, commit=self.commit, executer=self.executer, filter=self._path_filter)
-
- latest_mod_time = max(item.time for item in project_souce_collector.git_contents.values() if item.time)
+ source_collector = SourceCollector(root=self.root, commit=self.commit, executer=self.executer, filter=self._path_filter)
+ print(f"Collecting sources of {self.project}...")
+ archive_tree = source_collector.get_archive_file_tree()
+ latest_mod_time = archive_tree.get_latest_mod_time()
+ archive_tree.add_file(NodeInArchive.from_text(arcpath=REVISION_TXT, text=f"{self.revision}\n", time=latest_mod_time))
+ archive_tree.add_file(NodeInArchive.from_text(arcpath=f"{GIT_HASH_FILENAME}", text=f"{self.commit}\n", time=latest_mod_time))
+ archive_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["source"].get("files", {}), file_mapping_root=self.root, context=self.get_context(), time=latest_mod_time)
+ archive_base = f"{self.project}-{self.version}"
zip_path = self.dist_path / f"{archive_base}.zip"
tgz_path = self.dist_path / f"{archive_base}.tar.gz"
txz_path = self.dist_path / f"{archive_base}.tar.xz"
@@ -460,19 +597,18 @@ def create_source_archives(self) -> None:
txz_path.touch()
else:
with Archiver(zip_path=zip_path, tgz_path=tgz_path, txz_path=txz_path) as archiver:
- archiver.add_file_data(arcpath=f"{archive_base}/VERSION.txt", data=f"{self.version}\n".encode(), mode=0o100644, time=latest_mod_time)
- archiver.add_file_data(arcpath=f"{archive_base}/{GIT_HASH_FILENAME}", data=f"{self.commit}\n".encode(), mode=0o100644, time=latest_mod_time)
-
- print(f"Adding source files of main project ...")
- project_souce_collector.add_to_archiver(archive_base=archive_base, archiver=archiver)
+ print(f"Adding source files of {self.project}...")
+ archive_tree.add_to_archiver(archive_base=archive_base, archiver=archiver)
for extra_repo in self.release_info["source"].get("extra-repos", []):
extra_repo_root = self.root / extra_repo
assert (extra_repo_root / ".git").exists(), f"{extra_repo_root} must be a git repo"
extra_repo_commit = self.executer.check_output(["git", "rev-parse", "HEAD"], dry_out=f"gitsha-extra-repo-{extra_repo}", cwd=extra_repo_root).strip()
extra_repo_source_collector = SourceCollector(root=extra_repo_root, commit=extra_repo_commit, executer=self.executer, filter=self._external_repo_path_filter)
+ print(f"Collecting sources of {extra_repo} ...")
+ extra_repo_archive_tree = extra_repo_source_collector.get_archive_file_tree()
print(f"Adding source files of {extra_repo} ...")
- extra_repo_source_collector.add_to_archiver(archive_base=f"{archive_base}/{extra_repo}", archiver=archiver)
+ extra_repo_archive_tree.add_to_archiver(archive_base=f"{archive_base}/{extra_repo}", archiver=archiver)
for file in self.release_info["source"]["checks"]:
assert f"{archive_base}/{file}" in archiver.added_files, f"'{archive_base}/{file}' must exist"
@@ -494,7 +630,8 @@ def create_dmg(self, configuration: str="Release") -> None:
xcode_project = self.root / self.release_info["dmg"]["project"]
assert xcode_project.is_dir(), f"{xcode_project} must be a directory"
assert (xcode_project / "project.pbxproj").is_file, f"{xcode_project} must contain project.pbxproj"
- dmg_in.unlink(missing_ok=True)
+ if not self.fast:
+ dmg_in.unlink(missing_ok=True)
build_xcconfig = self.release_info["dmg"].get("build-xcconfig")
if build_xcconfig:
shutil.copy(self.root / build_xcconfig, xcode_project.parent / "build.xcconfig")
@@ -524,34 +661,26 @@ def create_dmg(self, configuration: str="Release") -> None:
def git_hash_data(self) -> bytes:
return f"{self.commit}\n".encode()
- def _tar_add_git_hash(self, tar_object: tarfile.TarFile, root: typing.Optional[str]=None, time: typing.Optional[datetime.datetime]=None):
- if not time:
- time = datetime.datetime(year=2024, month=4, day=1)
- path = GIT_HASH_FILENAME
- if root:
- path = f"{root}/{path}"
-
- tar_info = tarfile.TarInfo(path)
- tar_info.mode = 0o100644
- tar_info.size = len(self.git_hash_data)
- tar_info.mtime = int(time.timestamp())
- tar_object.addfile(tar_info, fileobj=io.BytesIO(self.git_hash_data))
-
def create_mingw_archives(self) -> None:
build_type = "Release"
build_parent_dir = self.root / "build-mingw"
- assert "autotools" in self.release_info["mingw"]
- assert "cmake" not in self.release_info["mingw"]
- mingw_archs = self.release_info["mingw"]["autotools"]["archs"]
+ ARCH_TO_GNU_ARCH = {
+ # "arm64": "aarch64",
+ "x86": "i686",
+ "x64": "x86_64",
+ }
ARCH_TO_TRIPLET = {
+ # "arm64": "aarch64-w64-mingw32",
"x86": "i686-w64-mingw32",
"x64": "x86_64-w64-mingw32",
}
new_env = dict(os.environ)
+ cmake_prefix_paths = []
+ mingw_deps_path = self.deps_path / "mingw-deps"
+
if "dependencies" in self.release_info["mingw"]:
- mingw_deps_path = self.deps_path / "mingw-deps"
shutil.rmtree(mingw_deps_path, ignore_errors=True)
mingw_deps_path.mkdir()
@@ -562,16 +691,24 @@ def extract_filter(member: tarfile.TarInfo, path: str, /):
if member.name.startswith("SDL"):
member.name = "/".join(Path(member.name).parts[1:])
return member
- for dep in self.release_info["dependencies"].keys():
- extract_dir = mingw_deps_path / f"extract-{dep}"
- extract_dir.mkdir()
- with chdir(extract_dir):
- tar_path = glob.glob(self.release_info["mingw"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)[0]
+ for dep in self.release_info.get("dependencies", {}):
+ extract_path = mingw_deps_path / f"extract-{dep}"
+ extract_path.mkdir()
+ with chdir(extract_path):
+ tar_path = self.deps_path / glob.glob(self.release_info["mingw"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)[0]
logger.info("Extracting %s to %s", tar_path, mingw_deps_path)
- with tarfile.open(self.deps_path / tar_path, mode="r:gz") as tarf:
+ assert tar_path.suffix in (".gz", ".xz")
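+ # Derive the tarfile mode ("r:gz" or "r:xz") from the artifact's extension.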
+ with tarfile.open(tar_path, mode=f"r:{tar_path.suffix.strip('.')}") as tarf:
tarf.extractall(filter=extract_filter)
- for triplet in ARCH_TO_TRIPLET.values():
- self.executer.run(["make", f"-j{os.cpu_count()}", "-C", str(extract_dir), "install-package", f"arch={triplet}", f"prefix={str(mingw_deps_path / triplet)}"])
+ for arch, triplet in ARCH_TO_TRIPLET.items():
+ install_cmd = self.release_info["mingw"]["dependencies"][dep]["install-command"]
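+ # The per-dependency install-command from release-info.json is templated with the GNU arch, triplet and install prefix before execution.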
+ extra_configure_data = {
+ "ARCH": ARCH_TO_GNU_ARCH[arch],
+ "TRIPLET": triplet,
+ "PREFIX": str(mingw_deps_path / triplet),
+ }
+ install_cmd = configure_text(install_cmd, context=self.get_context(extra_configure_data))
+ self.executer.run(shlex.split(install_cmd), cwd=str(extract_path))
dep_binpath = mingw_deps_path / triplet / "bin"
assert dep_binpath.is_dir(), f"{dep_binpath} for PATH should exist"
@@ -580,85 +717,309 @@ def extract_filter(member: tarfile.TarInfo, path: str, /):
new_env["PATH"] = os.pathsep.join([str(dep_binpath), new_env["PATH"]])
new_env["PKG_CONFIG_PATH"] = str(dep_pkgconfig)
+ cmake_prefix_paths.append(mingw_deps_path)
new_env["CFLAGS"] = f"-O2 -ffile-prefix-map={self.root}=/src/{self.project}"
new_env["CXXFLAGS"] = f"-O2 -ffile-prefix-map={self.root}=/src/{self.project}"
- arch_install_paths = {}
- arch_files = {}
- for arch in mingw_archs:
- triplet = ARCH_TO_TRIPLET[arch]
- new_env["CC"] = f"{triplet}-gcc"
- new_env["CXX"] = f"{triplet}-g++"
- new_env["RC"] = f"{triplet}-windres"
-
- build_path = build_parent_dir / f"build-{triplet}"
- install_path = build_parent_dir / f"install-{triplet}"
- arch_install_paths[arch] = install_path
- shutil.rmtree(install_path, ignore_errors=True)
- build_path.mkdir(parents=True, exist_ok=True)
- with self.section_printer.group(f"Configuring MinGW {triplet}"):
- extra_args = [arg.replace("@DEP_PREFIX@", str(mingw_deps_path / triplet)) for arg in self.release_info["mingw"]["autotools"]["args"]]
- assert "@" not in " ".join(extra_args), f"@ should not be present in extra arguments ({extra_args})"
- self.executer.run([
- self.root / "configure",
- f"--prefix={install_path}",
-
(Patch may be truncated, please check the link at the top of this post.)