From e4126d8d6f24804fddc5dbb9821f1866b97d66b0 Mon Sep 17 00:00:00 2001
From: Anonymous Maarten <[EMAIL REDACTED]>
Date: Fri, 4 Oct 2024 01:40:42 +0200
Subject: [PATCH] Add parametrized release script
[ci skip]
---
.github/workflows/release.yml | 23 +-
build-scripts/build-release.py | 973 ++++++++++++------
build-scripts/create-release.py | 7 +-
build-scripts/release-info.json | 119 +++
mingw/pkg-support/Makefile | 11 +-
.../cmake/sdl2-config-version.cmake | 4 +-
mingw/pkg-support/cmake/sdl2-config.cmake | 4 +-
7 files changed, 800 insertions(+), 341 deletions(-)
create mode 100644 build-scripts/release-info.json
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index f6f0c56665a8d..c740c2399158a 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -37,9 +37,8 @@ jobs:
shell: bash
run: |
python build-scripts/build-release.py \
- --create source \
+ --actions source \
--commit ${{ inputs.commit }} \
- --project SDL2 \
--root "${{ github.workspace }}/SDL" \
--github \
--debug
@@ -93,7 +92,7 @@ jobs:
- name: 'Set up Python'
uses: actions/setup-python@v5
with:
- python-version: '3.10'
+ python-version: '3.11'
- name: 'Fetch build-release.py'
uses: actions/checkout@v4
with:
@@ -114,9 +113,8 @@ jobs:
shell: bash
run: |
python build-scripts/build-release.py \
- --create framework \
+ --actions dmg \
--commit ${{ inputs.commit }} \
- --project SDL2 \
--root "${{ steps.tar.outputs.path }}" \
--github \
--debug
@@ -192,7 +190,7 @@ jobs:
- name: 'Set up Python'
uses: actions/setup-python@v5
with:
- python-version: '3.10'
+ python-version: '3.11'
- name: 'Fetch build-release.py'
uses: actions/checkout@v4
with:
@@ -213,9 +211,8 @@ jobs:
id: releaser
run: |
python build-scripts/build-release.py `
- --create win32 `
+ --actions msvc `
--commit ${{ inputs.commit }} `
- --project SDL2 `
--root "${{ steps.zip.outputs.path }}" `
--github `
--debug
@@ -310,7 +307,7 @@ jobs:
- name: 'Set up Python'
uses: actions/setup-python@v5
with:
- python-version: '3.10'
+ python-version: '3.11'
- name: 'Fetch build-release.py'
uses: actions/checkout@v4
with:
@@ -334,9 +331,8 @@ jobs:
id: releaser
run: |
python build-scripts/build-release.py \
- --create mingw \
+ --actions mingw \
--commit ${{ inputs.commit }} \
- --project SDL2 \
--root "${{ steps.tar.outputs.path }}" \
--github \
--debug
@@ -370,12 +366,13 @@ jobs:
mkdir -p /tmp/tardir
tar -C /tmp/tardir -v -x -f "${{ github.workspace }}/${{ needs.src.outputs.src-tar-gz }}"
echo "path=/tmp/tardir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$GITHUB_OUTPUT
- - name: 'Untar ${{ needs.mingw.outputs.mingw-devel-tar-gz }}'
+ - name: 'Untar and install ${{ needs.mingw.outputs.mingw-devel-tar-gz }}'
id: bin
run: |
mkdir -p /tmp/mingw-tardir
tar -C /tmp/mingw-tardir -v -x -f "${{ github.workspace }}/${{ needs.mingw.outputs.mingw-devel-tar-gz }}"
- echo "path=/tmp/mingw-tardir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$GITHUB_OUTPUT
+ make -C /tmp/mingw-tardir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }} cross CROSS_PATH=/tmp/deps-mingw
+ echo "path=/tmp/deps-mingw" >>$GITHUB_OUTPUT
- name: 'CMake (configure + build) i686'
run: |
set -e
diff --git a/build-scripts/build-release.py b/build-scripts/build-release.py
index 0da88a075bcb8..0625ad6e1a6d7 100755
--- a/build-scripts/build-release.py
+++ b/build-scripts/build-release.py
@@ -1,13 +1,22 @@
#!/usr/bin/env python
+"""
+This script is shared between SDL2, SDL2_image, SDL2_mixer and SDL2_ttf.
+Don't add project-specific modifications to this script.
+Put them in release-info.json instead.
+"""
+
import argparse
import collections
+from collections.abc import Callable
import contextlib
import datetime
+import fnmatch
import glob
import io
import json
import logging
+import multiprocessing
import os
from pathlib import Path
import platform
@@ -24,18 +33,43 @@
logger = logging.getLogger(__name__)
-VcArchDevel = collections.namedtuple("VcArchDevel", ("dll", "pdb", "imp", "main", "test"))
GIT_HASH_FILENAME = ".git-hash"
-ANDROID_AVAILABLE_ABIS = [
- "armeabi-v7a",
- "arm64-v8a",
- "x86",
- "x86_64",
-]
-ANDROID_MINIMUM_API = 19
-ANDROID_TARGET_API = 29
-ANDROID_MINIMUM_NDK = 21
+
+def safe_isotime_to_datetime(str_isotime: str) -> datetime.datetime:
+ try:
+ return datetime.datetime.fromisoformat(str_isotime)
+ except ValueError:
+ pass
+ logger.warning("Invalid iso time: %s", str_isotime)
+ if str_isotime[-6:-5] in ("+", "-"):
+ # Commits can have isotime with invalid timezone offset (e.g. "2021-07-04T20:01:40+32:00")
+ modified_str_isotime = str_isotime[:-6] + "+00:00"
+ try:
+ return datetime.datetime.fromisoformat(modified_str_isotime)
+ except ValueError:
+ pass
+ raise ValueError(f"Invalid isotime: {str_isotime}")
+
+
+class VsArchPlatformConfig:
+ def __init__(self, arch: str, platform: str, configuration: str):
+ self.arch = arch
+ self.platform = platform
+ self.configuration = configuration
+
+ def configure(self, s: str) -> str:
+ return s.replace("@ARCH@", self.arch).replace("@PLATFORM@", self.platform).replace("@CONFIGURATION@", self.configuration)
+
+
+@contextlib.contextmanager
+def chdir(path):
+ original_cwd = os.getcwd()
+ try:
+ os.chdir(path)
+ yield
+ finally:
+ os.chdir(original_cwd)
class Executer:
@@ -43,14 +77,18 @@ def __init__(self, root: Path, dry: bool=False):
self.root = root
self.dry = dry
- def run(self, cmd, stdout=False, dry_out=None, force=False):
+ def run(self, cmd, cwd=None, env=None):
+ logger.info("Executing args=%r", cmd)
sys.stdout.flush()
+ if not self.dry:
+ subprocess.run(cmd, check=True, cwd=cwd or self.root, env=env, text=True)
+
+ def check_output(self, cmd, cwd=None, dry_out=None, env=None, text=True):
logger.info("Executing args=%r", cmd)
- if self.dry and not force:
- if stdout:
- return subprocess.run(["echo", "-E", dry_out or ""], stdout=subprocess.PIPE if stdout else None, text=True, check=True, cwd=self.root)
- else:
- return subprocess.run(cmd, stdout=subprocess.PIPE if stdout else None, text=True, check=True, cwd=self.root)
+ sys.stdout.flush()
+ if self.dry:
+ return dry_out
+ return subprocess.check_output(cmd, cwd=cwd or self.root, env=env, text=text)
class SectionPrinter:
@@ -103,7 +141,7 @@ def find_vsdevcmd(self, year: typing.Optional[str]=None) -> typing.Optional[Path
return None
vswhere_spec.extend(["-version", f"[{version},{version+1})"])
vswhere_cmd = ["vswhere"] + vswhere_spec + ["-property", "installationPath"]
- vs_install_path = Path(self.executer.run(vswhere_cmd, stdout=True, dry_out="/tmp").stdout.strip())
+ vs_install_path = Path(self.executer.check_output(vswhere_cmd, dry_out="/tmp").strip())
logger.info("VS install_path = %s", vs_install_path)
assert vs_install_path.is_dir(), "VS installation path does not exist"
vsdevcmd_path = vs_install_path / "Common7/Tools/vsdevcmd.bat"
@@ -116,7 +154,7 @@ def find_vsdevcmd(self, year: typing.Optional[str]=None) -> typing.Optional[Path
def find_msbuild(self) -> typing.Optional[Path]:
vswhere_cmd = ["vswhere", "-latest", "-requires", "Microsoft.Component.MSBuild", "-find", r"MSBuild\**\Bin\MSBuild.exe"]
- msbuild_path = Path(self.executer.run(vswhere_cmd, stdout=True, dry_out="/tmp/MSBuild.exe").stdout.strip())
+ msbuild_path = Path(self.executer.check_output(vswhere_cmd, dry_out="/tmp/MSBuild.exe").strip())
logger.info("MSBuild path = %s", msbuild_path)
if self.dry:
msbuild_path.parent.mkdir(parents=True, exist_ok=True)
@@ -124,11 +162,11 @@ def find_msbuild(self) -> typing.Optional[Path]:
assert msbuild_path.is_file(), "MSBuild.exe does not exist"
return msbuild_path
- def build(self, arch: str, platform: str, configuration: str, projects: list[Path]):
+ def build(self, arch_platform: VsArchPlatformConfig, projects: list[Path]):
assert projects, "Need at least one project to build"
- vsdev_cmd_str = f"\"{self.vsdevcmd}\" -arch={arch}"
- msbuild_cmd_str = " && ".join([f"\"{self.msbuild}\" \"{project}\" /m /p:BuildInParallel=true /p:Platform={platform} /p:Configuration={configuration}" for project in projects])
+ vsdev_cmd_str = f"\"{self.vsdevcmd}\" -arch={arch_platform.arch}"
+ msbuild_cmd_str = " && ".join([f"\"{self.msbuild}\" \"{project}\" /m /p:BuildInParallel=true /p:Platform={arch_platform.platform} /p:Configuration={arch_platform.configuration}" for project in projects])
bat_contents = f"{vsdev_cmd_str} && {msbuild_cmd_str}\n"
bat_path = Path(tempfile.gettempdir()) / "cmd.bat"
with bat_path.open("w") as f:
@@ -139,35 +177,147 @@ def build(self, arch: str, platform: str, configuration: str, projects: list[Pat
self.executer.run(cmd)
-class Releaser:
- def __init__(self, project: str, commit: str, root: Path, dist_path: Path, section_printer: SectionPrinter, executer: Executer, cmake_generator: str):
- self.project = project
- self.version = self.extract_sdl_version(root=root, project=project)
+class Archiver:
+ def __init__(self, zip_path: typing.Optional[Path]=None, tgz_path: typing.Optional[Path]=None, txz_path: typing.Optional[Path]=None):
+ self._zip_files = []
+ self._tar_files = []
+ self._added_files = set()
+ if zip_path:
+ self._zip_files.append(zipfile.ZipFile(zip_path, "w", compression=zipfile.ZIP_DEFLATED))
+ if tgz_path:
+ self._tar_files.append(tarfile.open(tgz_path, "w:gz"))
+ if txz_path:
+ self._tar_files.append(tarfile.open(txz_path, "w:xz"))
+
+ @property
+ def added_files(self) -> set[str]:
+ return self._added_files
+
+ def add_file_data(self, arcpath: str, data: bytes, mode: int, time: datetime.datetime):
+ for zf in self._zip_files:
+ file_data_time = (time.year, time.month, time.day, time.hour, time.minute, time.second)
+ zip_info = zipfile.ZipInfo(filename=arcpath, date_time=file_data_time)
+ zip_info.external_attr = mode << 16
+ zip_info.compress_type = zipfile.ZIP_DEFLATED
+ zf.writestr(zip_info, data=data)
+ for tf in self._tar_files:
+ tar_info = tarfile.TarInfo(arcpath)
+ tar_info.type = tarfile.REGTYPE
+ tar_info.mode = mode
+ tar_info.size = len(data)
+ tar_info.mtime = int(time.timestamp())
+ tf.addfile(tar_info, fileobj=io.BytesIO(data))
+
+ self._added_files.add(arcpath)
+
+ def add_symlink(self, arcpath: str, target: str, time: datetime.datetime, files_for_zip):
+ for zf in self._zip_files:
+ file_data_time = (time.year, time.month, time.day, time.hour, time.minute, time.second)
+ for f in files_for_zip:
+ zip_info = zipfile.ZipInfo(filename=f["arcpath"], date_time=file_data_time)
+ zip_info.external_attr = f["mode"] << 16
+ zip_info.compress_type = zipfile.ZIP_DEFLATED
+ zf.writestr(zip_info, data=f["data"])
+ for tf in self._tar_files:
+ tar_info = tarfile.TarInfo(arcpath)
+ tar_info.type = tarfile.SYMTYPE
+ tar_info.mode = 0o777
+ tar_info.mtime = int(time.timestamp())
+ tar_info.linkname = target
+ tf.addfile(tar_info)
+
+ self._added_files.update(f["arcpath"] for f in files_for_zip)
+
+ def add_git_hash(self, commit: str, arcdir: typing.Optional[str]=None, time: typing.Optional[datetime.datetime]=None):
+ arcpath = GIT_HASH_FILENAME
+ if arcdir and arcdir[-1:] != "/":
+ arcpath = f"{arcdir}/{arcpath}"
+ if not time:
+ time = datetime.datetime(year=2024, month=4, day=1)
+ data = f"{commit}\n".encode()
+ self.add_file_data(arcpath=arcpath, data=data, mode=0o100644, time=time)
+
+ def add_file_path(self, arcpath: str, path: Path):
+ assert path.is_file(), f"{path} should be a file"
+ for zf in self._zip_files:
+ zf.write(path, arcname=arcpath)
+ for tf in self._tar_files:
+ tf.add(path, arcname=arcpath)
+
+ def add_file_directory(self, arcdirpath: str, dirpath: Path):
+ assert dirpath.is_dir()
+ if arcdirpath and arcdirpath[-1:] != "/":
+ arcdirpath += "/"
+ for f in dirpath.iterdir():
+ if f.is_file():
+ arcpath = f"{arcdirpath}{f.name}"
+ logger.debug("Adding %s to %s", f, arcpath)
+ self.add_file_path(arcpath=arcpath, path=f)
+
+ def close(self):
+ # Archiver is intentionally made invalid after this function
+ del self._zip_files
+ self._zip_files = None
+ del self._tar_files
+ self._tar_files = None
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ self.close()
+
+
+class SourceCollector:
+ TreeItem = collections.namedtuple("TreeItem", ("path", "mode", "data", "symtarget", "directory", "time"))
+ def __init__(self, root: Path, commit: str, filter: typing.Optional[Callable[[str], bool]], executer: Executer):
self.root = root
self.commit = commit
- self.dist_path = dist_path
- self.section_printer = section_printer
+ self.filter = filter
self.executer = executer
- self.cmake_generator = cmake_generator
+ self._git_contents: typing.Optional[dict[str, SourceCollector.TreeItem]] = None
- self.artifacts: dict[str, Path] = {}
+ def _get_git_contents(self) -> dict[str, TreeItem]:
+ contents_tgz = subprocess.check_output(["git", "archive", "--format=tar.gz", self.commit, "-o", "/dev/stdout"], cwd=self.root, text=False)
+ tar_archive = tarfile.open(fileobj=io.BytesIO(contents_tgz), mode="r:gz")
+ filenames = tuple(m.name for m in tar_archive if (m.isfile() or m.issym()))
- @property
- def dry(self) -> bool:
- return self.executer.dry
+ file_times = self._get_file_times(paths=filenames)
+ git_contents = {}
+ for ti in tar_archive:
+ if self.filter and not self.filter(ti.name):
+ continue
+ data = None
+ symtarget = None
+ directory = False
+ file_time = None
+ if ti.isfile():
+ contents_file = tar_archive.extractfile(ti.name)
+ data = contents_file.read()
+ file_time = file_times[ti.name]
+ elif ti.issym():
+ symtarget = ti.linkname
+ file_time = file_times[ti.name]
+ elif ti.isdir():
+ directory = True
+ else:
+ raise ValueError(f"{ti.name}: unknown type")
+ git_contents[ti.name] = self.TreeItem(path=ti.name, mode=ti.mode, data=data, symtarget=symtarget, directory=directory, time=file_time)
+ return git_contents
- def prepare(self):
- logger.debug("Creating dist folder")
- self.dist_path.mkdir(parents=True, exist_ok=True)
+ @property
+ def git_contents(self) -> dict[str, TreeItem]:
+ if self._git_contents is None:
+ self._git_contents = self._get_git_contents()
+ return self._git_contents
- TreeItem = collections.namedtuple("TreeItem", ("path", "mode", "data", "time"))
def _get_file_times(self, paths: tuple[str, ...]) -> dict[str, datetime.datetime]:
dry_out = textwrap.dedent("""\
time=2024-03-14T15:40:25-07:00
M\tCMakeLists.txt
""")
- git_log_out = self.executer.run(["git", "log", "--name-status", '--pretty=time=%cI', self.commit], stdout=True, dry_out=dry_out).stdout.splitlines(keepends=False)
+ git_log_out = self.executer.check_output(["git", "log", "--name-status", '--pretty=time=%cI', self.commit], dry_out=dry_out, cwd=self.root).splitlines(keepends=False)
current_time = None
set_paths = set(paths)
path_times: dict[str, datetime.datetime] = {}
@@ -175,98 +325,191 @@ def _get_file_times(self, paths: tuple[str, ...]) -> dict[str, datetime.datetime
if not line:
continue
if line.startswith("time="):
- current_time = datetime.datetime.fromisoformat(line.removeprefix("time="))
+ current_time = safe_isotime_to_datetime(line.removeprefix("time="))
continue
mod_type, file_paths = line.split(maxsplit=1)
assert current_time is not None
for file_path in file_paths.split("\t"):
if file_path in set_paths and file_path not in path_times:
path_times[file_path] = current_time
- assert set(path_times.keys()) == set_paths
+
+ # FIXME: find out why some files are not shown in "git log"
+ # assert set(path_times.keys()) == set_paths
+ if set(path_times.keys()) != set_paths:
+ found_times = set(path_times.keys())
+ paths_without_times = set_paths.difference(found_times)
+ logger.warning("No times found for these paths: %s", paths_without_times)
+ max_time = max(time for time in path_times.values())
+ for path in paths_without_times:
+ path_times[path] = max_time
+
return path_times
- @staticmethod
- def _path_filter(path: str):
+ def add_to_archiver(self, archive_base: str, archiver: Archiver):
+ remaining_symlinks = set()
+ added_files = dict()
+
+ def calculate_symlink_target(s: SourceCollector.TreeItem) -> str:
+ dest_dir = os.path.dirname(s.path)
+ if dest_dir:
+ dest_dir += "/"
+ target = dest_dir + s.symtarget
+ while True:
+ new_target, n = re.subn(r"([^/]+/+[.]{2}/)", "", target)
+ logger.debug("target=%r new_target=%r", target, new_target)
+ target = new_target
+ if not n:
+ break
+ return target
+
+ # Add files in first pass
+ for git_file in self.git_contents.values():
+ if git_file.data is not None:
+ archiver.add_file_data(arcpath=f"{archive_base}/{git_file.path}", data=git_file.data, time=git_file.time, mode=git_file.mode)
+ added_files[git_file.path] = git_file
+ elif git_file.symtarget is not None:
+ remaining_symlinks.add(git_file)
+
+ # Resolve symlinks in second pass: zipfile does not support symlinks, so add files to zip archive
+ while True:
+ if not remaining_symlinks:
+ break
+ symlinks_this_time = set()
+ extra_added_files = {}
+ for symlink in remaining_symlinks:
+ symlink_files_for_zip = {}
+ symlink_target_path = calculate_symlink_target(symlink)
+ if symlink_target_path in added_files:
+ symlink_files_for_zip[symlink.path] = added_files[symlink_target_path]
+ else:
+ symlink_target_path_slash = symlink_target_path + "/"
+ for added_file in added_files:
+ if added_file.startswith(symlink_target_path_slash):
+ path_in_symlink = symlink.path + "/" + added_file.removeprefix(symlink_target_path_slash)
+ symlink_files_for_zip[path_in_symlink] = added_files[added_file]
+ if symlink_files_for_zip:
+ symlinks_this_time.add(symlink)
+ extra_added_files.update(symlink_files_for_zip)
+ files_for_zip = [{"arcpath": f"{archive_base}/{sym_path}", "data": sym_info.data, "mode": sym_info.mode} for sym_path, sym_info in symlink_files_for_zip.items()]
+ archiver.add_symlink(arcpath=f"{archive_base}/{symlink.path}", target=symlink.symtarget, time=symlink.time, files_for_zip=files_for_zip)
+ # if not symlinks_this_time:
+ # logger.info("files added: %r", set(path for path in added_files.keys()))
+ assert symlinks_this_time, f"No targets found for symlinks: {remaining_symlinks}"
+ remaining_symlinks.difference_update(symlinks_this_time)
+ added_files.update(extra_added_files)
+
+
+class Releaser:
+ def __init__(self, release_info: dict, commit: str, root: Path, dist_path: Path, section_printer: SectionPrinter, executer: Executer, cmake_generator: str, deps_path: Path, overwrite: bool, github: bool, fast: bool):
+ self.release_info = release_info
+ self.project = release_info["name"]
+ self.version = self.extract_sdl_version(root=root, release_info=release_info)
+ self.root = root
+ self.commit = commit
+ self.dist_path = dist_path
+ self.section_printer = section_printer
+ self.executer = executer
+ self.cmake_generator = cmake_generator
+ self.cpu_count = multiprocessing.cpu_count()
+ self.deps_path = deps_path
+ self.overwrite = overwrite
+ self.github = github
+ self.fast = fast
+
+ self.artifacts: dict[str, Path] = {}
+
+ @property
+ def dry(self) -> bool:
+ return self.executer.dry
+
+ def prepare(self):
+ logger.debug("Creating dist folder")
+ self.dist_path.mkdir(parents=True, exist_ok=True)
+
+ @classmethod
+ def _path_filter(cls, path: str) -> bool:
+ if ".gitmodules" in path:
+ return True
if path.startswith(".git"):
return False
return True
- def _get_git_contents(self) -> dict[str, TreeItem]:
- contents_tgz = subprocess.check_output(["git", "archive", "--format=tar.gz", self.commit, "-o", "/dev/stdout"], text=False)
- contents = tarfile.open(fileobj=io.BytesIO(contents_tgz), mode="r:gz")
- filenames = tuple(m.name for m in contents if m.isfile())
- assert "src/SDL.c" in filenames
- assert "include/SDL.h" in filenames
- file_times = self._get_file_times(filenames)
- git_contents = {}
- for ti in contents:
- if not ti.isfile():
- continue
- if not self._path_filter(ti.name):
- continue
- contents_file = contents.extractfile(ti.name)
- assert contents_file, f"{ti.name} is not a file"
- git_contents[ti.name] = self.TreeItem(path=ti.name, mode=ti.mode, data=contents_file.read(), time=file_times[ti.name])
- return git_contents
+ @classmethod
+ def _external_repo_path_filter(cls, path: str) -> bool:
+ if not cls._path_filter(path):
+ return False
+ if path.startswith("test/") or path.startswith("tests/"):
+ return False
+ return True
def create_source_archives(self) -> None:
archive_base = f"{self.project}-{self.version}"
- git_contents = self._get_git_contents()
- git_files = list(git_contents.values())
- assert len(git_contents) == len(git_files)
-
- latest_mod_time = max(item.time for item in git_files)
+ project_source_collector = SourceCollector(root=self.root, commit=self.commit, executer=self.executer, filter=self._path_filter)
- git_files.append(self.TreeItem(path="VERSION.txt", data=f"{self.version}\n".encode(), mode=0o100644, time=latest_mod_time))
- git_files.append(self.TreeItem(path=GIT_HASH_FILENAME, data=f"{self.commit}\n".encode(), mode=0o100644, time=latest_mod_time))
-
- git_files.sort(key=lambda v: v.time)
+ latest_mod_time = max(item.time for item in project_source_collector.git_contents.values() if item.time)
zip_path = self.dist_path / f"{archive_base}.zip"
- logger.info("Creating .zip source archive (%s)...", zip_path)
+ tgz_path = self.dist_path / f"{archive_base}.tar.gz"
+ txz_path = self.dist_path / f"{archive_base}.tar.xz"
+
+ logger.info("Creating zip/tgz/txz source archives ...")
if self.dry:
zip_path.touch()
+ tgz_path.touch()
+ txz_path.touch()
else:
- with zipfile.ZipFile(zip_path, "w", compression=zipfile.ZIP_DEFLATED) as zip_object:
- for git_file in git_files:
- file_data_time = (git_file.time.year, git_file.time.month, git_file.time.day, git_file.time.hour, git_file.time.minute, git_file.time.second)
- zip_info = zipfile.ZipInfo(filename=f"{archive_base}/{git_file.path}", date_time=file_data_time)
- zip_info.external_attr = git_file.mode << 16
- zip_info.compress_type = zipfile.ZIP_DEFLATED
- zip_object.writestr(zip_info, data=git_file.data)
- self.artifacts["src-zip"] = zip_path
+ with Archiver(zip_path=zip_path, tgz_path=tgz_path, txz_path=txz_path) as archiver:
+ archiver.add_file_data(arcpath=f"{archive_base}/VERSION.txt", data=f"{self.version}\n".encode(), mode=0o100644, time=latest_mod_time)
+ archiver.add_file_data(arcpath=f"{archive_base}/{GIT_HASH_FILENAME}", data=f"{self.commit}\n".encode(), mode=0o100644, time=latest_mod_time)
- tar_types = (
- (".tar.gz", "gz"),
- (".tar.xz", "xz"),
- )
- for ext, comp in tar_types:
- tar_path = self.dist_path / f"{archive_base}{ext}"
- logger.info("Creating %s source archive (%s)...", ext, tar_path)
- if self.dry:
- tar_path.touch()
- else:
- with tarfile.open(tar_path, f"w:{comp}") as tar_object:
- for git_file in git_files:
- tar_info = tarfile.TarInfo(f"{archive_base}/{git_file.path}")
- tar_info.mode = git_file.mode
- tar_info.size = len(git_file.data)
- tar_info.mtime = git_file.time.timestamp()
- tar_object.addfile(tar_info, fileobj=io.BytesIO(git_file.data))
-
- if tar_path.suffix == ".gz":
- # Zero the embedded timestamp in the gzip'ed tarball
- with open(tar_path, "r+b") as f:
- f.seek(4, 0)
- f.write(b"\x00\x00\x00\x00")
+ logger.info("Adding source files of main project ...")
+ project_source_collector.add_to_archiver(archive_base=archive_base, archiver=archiver)
+
+ for extra_repo in self.release_info["source"].get("extra-repos", []):
+ extra_repo_root = self.root / extra_repo
+ assert (extra_repo_root / ".git").exists(), f"{extra_repo_root} must be a git repo"
+ extra_repo_commit = self.executer.check_output(["git", "rev-parse", "HEAD"], dry_out=f"gitsha-extra-repo-{extra_repo}", cwd=extra_repo_root).strip()
+ extra_repo_source_collector = SourceCollector(root=extra_repo_root, commit=extra_repo_commit, executer=self.executer, filter=self._external_repo_path_filter)
+ logger.info("Adding source files of %s ...", extra_repo)
+ extra_repo_source_collector.add_to_archiver(archive_base=f"{archive_base}/{extra_repo}", archiver=archiver)
+
+ for file in self.release_info["source"]["checks"]:
+ assert f"{archive_base}/{file}" in archiver.added_files, f"'{archive_base}/{file}' must exist"
- self.artifacts[f"src-tar-{comp}"] = tar_path
+ logger.info("... done")
- def create_framework(self, configuration: str="Release") -> None:
- dmg_in = self.root / f"Xcode/SDL/build/{self.project}.dmg"
+ self.artifacts["src-zip"] = zip_path
+ self.artifacts["src-tar-gz"] = tgz_path
+ self.artifacts["src-tar-xz"] = txz_path
+
+ if not self.dry:
+ with tgz_path.open("r+b") as f:
+ # Zero the embedded timestamp in the gzip'ed tarball
+ f.seek(4, 0)
+ f.write(b"\x00\x00\x00\x00")
+
+ def create_dmg(self, configuration: str="Release") -> None:
+ dmg_in = self.root / self.release_info["dmg"]["path"]
+ xcode_project = self.root / self.release_info["dmg"]["project"]
+ assert xcode_project.is_dir(), f"{xcode_project} must be a directory"
+ assert (xcode_project / "project.pbxproj").is_file(), f"{xcode_project} must contain project.pbxproj"
dmg_in.unlink(missing_ok=True)
- self.executer.run(["xcodebuild", "-project", str(self.root / "Xcode/SDL/SDL.xcodeproj"), "-target", "Standard DMG", "-configuration", configuration])
+ build_xcconfig = self.release_info["dmg"].get("build-xcconfig")
+ if build_xcconfig:
+ shutil.copy(self.root / build_xcconfig, xcode_project.parent / "build.xcconfig")
+
+ xcode_scheme = self.release_info["dmg"].get("scheme")
+ xcode_target = self.release_info["dmg"].get("target")
+ assert xcode_scheme or xcode_target, "dmg needs scheme or target"
+ assert not (xcode_scheme and xcode_target), "dmg cannot have both scheme and target set"
+ if xcode_scheme:
+ scheme_or_target = "-scheme"
+ target_like = xcode_scheme
+ else:
+ scheme_or_target = "-target"
+ target_like = xcode_target
+ self.executer.run(["xcodebuild", "ONLY_ACTIVE_ARCH=NO", "-project", xcode_project, scheme_or_target, target_like, "-configuration", configuration])
if self.dry:
dmg_in.parent.mkdir(parents=True, exist_ok=True)
dmg_in.touch()
@@ -294,225 +537,316 @@ def _tar_add_git_hash(self, tar_object: tarfile.TarFile, root: typing.Optional[s
tar_info.mtime = int(time.timestamp())
tar_object.addfile(tar_info, fileobj=io.BytesIO(self.git_hash_data))
- def _zip_add_git_hash(self, zip_file: zipfile.ZipFile, root: typing.Optional[str]=None, time: typing.Optional[datetime.datetime]=None):
- if not time:
- time = datetime.datetime(year=2024, month=4, day=1)
-
(Patch may be truncated, please check the link at the top of this post.)
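The new build-scripts/release-info.json itself falls inside the truncated part of the patch, but the release_info[...] lookups in build-release.py above imply its rough shape. Below is a hypothetical minimal sketch only, written as a Python dict for illustration (the real file is JSON); the keys mirror the accesses shown above and the values are just the old hard-coded SDL2 defaults that this patch deletes.

# Hypothetical sketch -- not the actual release-info.json added by this patch.
# Keys mirror the release_info[...] accesses in build-release.py above; values
# reuse the old hard-coded SDL2 defaults that the patch removes.
release_info = {
    "name": "SDL2",                            # archive base name: {name}-{version}
    "source": {
        "checks": [                            # files that must exist in the source archive
            "src/SDL.c",
            "include/SDL.h",
        ],
        # "extra-repos": ["external/..."],     # optional: sibling git repos to bundle
    },
    "dmg": {
        "project": "Xcode/SDL/SDL.xcodeproj",  # passed to xcodebuild -project
        "path": "Xcode/SDL/build/SDL2.dmg",    # expected DMG output path
        "target": "Standard DMG",              # exactly one of "target" or "scheme"
        # "build-xcconfig": "...",             # optional xcconfig copied next to the project
    },
}

Keeping these values in one JSON file is what lets SDL2_image, SDL2_mixer and SDL2_ttf reuse build-release.py unchanged, as the module docstring notes.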