@@ -1,13 +1,22 @@
 #!/usr/bin/env python
+"""
+This script is shared between SDL2, SDL2_image, SDL2_mixer and SDL2_ttf.
+Don't specialize this script for doing project-specific modifications.
+Rather, modify release-info.json.
+"""
+
 import argparse
 import collections
+from collections.abc import Callable
 import contextlib
 import datetime
+import fnmatch
 import glob
 import io
 import json
 import logging
+import multiprocessing
 import os
 from pathlib import Path
 import platform
@@ -24,18 +33,43 @@ import zipfile

 logger = logging.getLogger(__name__)


-VcArchDevel = collections.namedtuple("VcArchDevel", ("dll", "pdb", "imp", "main", "test"))
 GIT_HASH_FILENAME = ".git-hash"

-ANDROID_AVAILABLE_ABIS = [
-    "armeabi-v7a",
-    "arm64-v8a",
-    "x86",
-    "x86_64",
-]
-ANDROID_MINIMUM_API = 19
-ANDROID_TARGET_API = 29
-ANDROID_MINIMUM_NDK = 21
+
+def safe_isotime_to_datetime(str_isotime: str) -> datetime.datetime:
+    try:
+        return datetime.datetime.fromisoformat(str_isotime)
+    except ValueError:
+        pass
+    logger.warning("Invalid iso time: %s", str_isotime)
+    if str_isotime[-6:-5] in ("+", "-"):
+        # Commits can have isotime with invalid timezone offset (e.g. "2021-07-04T20:01:40+32:00")
+        modified_str_isotime = str_isotime[:-6] + "+00:00"
+        try:
+            return datetime.datetime.fromisoformat(modified_str_isotime)
+        except ValueError:
+            pass
+    raise ValueError(f"Invalid isotime: {str_isotime}")
+
+
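Why the fallback above exists, as a standalone sketch (not part of the diff): `datetime.fromisoformat` rejects UTC offsets of 24 hours or more, which git can emit for commits with corrupted committer timezones, so the helper retries with the offset rewritten to UTC. The example offset comes from the comment in the diff itself:

```python
import datetime

def safe_isotime_to_datetime(str_isotime: str) -> datetime.datetime:
    # Same logic as the helper above: accept the string as-is when possible.
    try:
        return datetime.datetime.fromisoformat(str_isotime)
    except ValueError:
        pass
    # Retry with the out-of-range "+32:00"-style offset rewritten to UTC.
    if str_isotime[-6:-5] in ("+", "-"):
        return datetime.datetime.fromisoformat(str_isotime[:-6] + "+00:00")
    raise ValueError(f"Invalid isotime: {str_isotime}")

print(safe_isotime_to_datetime("2021-07-04T20:01:40+32:00"))
# 2021-07-04 20:01:40+00:00
```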
+class VsArchPlatformConfig:
+    def __init__(self, arch: str, platform: str, configuration: str):
+        self.arch = arch
+        self.platform = platform
+        self.configuration = configuration
+
+    def configure(self, s: str) -> str:
+        return s.replace("@ARCH@", self.arch).replace("@PLATFORM@", self.platform).replace("@CONFIGURATION@", self.configuration)
+
+
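A minimal illustration of what `configure()` is for: release-info.json can describe per-arch/per-configuration paths once, with placeholders. This assumes the `VsArchPlatformConfig` class above is in scope, and the path template is hypothetical:

```python
config = VsArchPlatformConfig(arch="x64", platform="x64", configuration="Release")
# Hypothetical template as it might appear in release-info.json:
print(config.configure("VisualC/SDL/@PLATFORM@/@CONFIGURATION@/SDL2.dll"))
# VisualC/SDL/x64/Release/SDL2.dll
```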
+@contextlib.contextmanager
+def chdir(path):
+    original_cwd = os.getcwd()
+    try:
+        os.chdir(path)
+        yield
+    finally:
+        os.chdir(original_cwd)


 class Executer:
@@ -43,14 +77,18 @@ class Executer:
         self.root = root
         self.dry = dry

-    def run(self, cmd, stdout=False, dry_out=None, force=False):
+    def run(self, cmd, cwd=None, env=None):
+        logger.info("Executing args=%r", cmd)
         sys.stdout.flush()
+        if not self.dry:
+            subprocess.run(cmd, check=True, cwd=cwd or self.root, env=env, text=True)
+
+    def check_output(self, cmd, cwd=None, dry_out=None, env=None, text=True):
         logger.info("Executing args=%r", cmd)
-        if self.dry and not force:
-            if stdout:
-                return subprocess.run(["echo", "-E", dry_out or ""], stdout=subprocess.PIPE if stdout else None, text=True, check=True, cwd=self.root)
-            else:
-                return subprocess.run(cmd, stdout=subprocess.PIPE if stdout else None, text=True, check=True, cwd=self.root)
+        sys.stdout.flush()
+        if self.dry:
+            return dry_out
+        return subprocess.check_output(cmd, cwd=cwd or self.root, env=env, text=text)
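The split into `run()`/`check_output()` mirrors the subprocess API; in dry-run mode `run()` executes nothing and `check_output()` hands back the caller-supplied `dry_out` placeholder, so downstream parsing code still has something to work with. A standalone re-statement of that contract (not part of the diff):

```python
import subprocess

class Executer:
    def __init__(self, root: str, dry: bool = False):
        self.root = root
        self.dry = dry

    def check_output(self, cmd, cwd=None, dry_out=None, env=None, text=True):
        if self.dry:
            return dry_out  # canned output instead of spawning a process
        return subprocess.check_output(cmd, cwd=cwd or self.root, env=env, text=text)

ex = Executer(root=".", dry=True)
# No git process is spawned; the caller still gets something parseable back.
assert ex.check_output(["git", "rev-parse", "HEAD"], dry_out="deadbeef") == "deadbeef"
```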

 class SectionPrinter:
@@ -103,7 +141,7 @@ class VisualStudio:
             return None
         vswhere_spec.extend(["-version", f"[{version},{version+1})"])
         vswhere_cmd = ["vswhere"] + vswhere_spec + ["-property", "installationPath"]
-        vs_install_path = Path(self.executer.run(vswhere_cmd, stdout=True, dry_out="/tmp").stdout.strip())
+        vs_install_path = Path(self.executer.check_output(vswhere_cmd, dry_out="/tmp").strip())
         logger.info("VS install_path = %s", vs_install_path)
         assert vs_install_path.is_dir(), "VS installation path does not exist"
         vsdevcmd_path = vs_install_path / "Common7/Tools/vsdevcmd.bat"
@@ -116,7 +154,7 @@ class VisualStudio:

     def find_msbuild(self) -> typing.Optional[Path]:
         vswhere_cmd = ["vswhere", "-latest", "-requires", "Microsoft.Component.MSBuild", "-find", r"MSBuild\**\Bin\MSBuild.exe"]
-        msbuild_path = Path(self.executer.run(vswhere_cmd, stdout=True, dry_out="/tmp/MSBuild.exe").stdout.strip())
+        msbuild_path = Path(self.executer.check_output(vswhere_cmd, dry_out="/tmp/MSBuild.exe").strip())
         logger.info("MSBuild path = %s", msbuild_path)
         if self.dry:
             msbuild_path.parent.mkdir(parents=True, exist_ok=True)
@@ -124,11 +162,11 @@ class VisualStudio:
         assert msbuild_path.is_file(), "MSBuild.exe does not exist"
         return msbuild_path

-    def build(self, arch: str, platform: str, configuration: str, projects: list[Path]):
+    def build(self, arch_platform: VsArchPlatformConfig, projects: list[Path]):
         assert projects, "Need at least one project to build"

-        vsdev_cmd_str = f"\"{self.vsdevcmd}\" -arch={arch}"
-        msbuild_cmd_str = " && ".join([f"\"{self.msbuild}\" \"{project}\" /m /p:BuildInParallel=true /p:Platform={platform} /p:Configuration={configuration}" for project in projects])
+        vsdev_cmd_str = f"\"{self.vsdevcmd}\" -arch={arch_platform.arch}"
+        msbuild_cmd_str = " && ".join([f"\"{self.msbuild}\" \"{project}\" /m /p:BuildInParallel=true /p:Platform={arch_platform.platform} /p:Configuration={arch_platform.configuration}" for project in projects])
         bat_contents = f"{vsdev_cmd_str} && {msbuild_cmd_str}\n"
         bat_path = Path(tempfile.gettempdir()) / "cmd.bat"
         with bat_path.open("w") as f:
@@ -139,35 +177,147 @@ class VisualStudio:
         self.executer.run(cmd)


-class Releaser:
-    def __init__(self, project: str, commit: str, root: Path, dist_path: Path, section_printer: SectionPrinter, executer: Executer, cmake_generator: str):
-        self.project = project
-        self.version = self.extract_sdl_version(root=root, project=project)
+class Archiver:
+    def __init__(self, zip_path: typing.Optional[Path]=None, tgz_path: typing.Optional[Path]=None, txz_path: typing.Optional[Path]=None):
+        self._zip_files = []
+        self._tar_files = []
+        self._added_files = set()
+        if zip_path:
+            self._zip_files.append(zipfile.ZipFile(zip_path, "w", compression=zipfile.ZIP_DEFLATED))
+        if tgz_path:
+            self._tar_files.append(tarfile.open(tgz_path, "w:gz"))
+        if txz_path:
+            self._tar_files.append(tarfile.open(txz_path, "w:xz"))
+
+    @property
+    def added_files(self) -> set[str]:
+        return self._added_files
+
+    def add_file_data(self, arcpath: str, data: bytes, mode: int, time: datetime.datetime):
+        for zf in self._zip_files:
+            file_data_time = (time.year, time.month, time.day, time.hour, time.minute, time.second)
+            zip_info = zipfile.ZipInfo(filename=arcpath, date_time=file_data_time)
+            zip_info.external_attr = mode << 16
+            zip_info.compress_type = zipfile.ZIP_DEFLATED
+            zf.writestr(zip_info, data=data)
+        for tf in self._tar_files:
+            tar_info = tarfile.TarInfo(arcpath)
+            tar_info.type = tarfile.REGTYPE
+            tar_info.mode = mode
+            tar_info.size = len(data)
+            tar_info.mtime = int(time.timestamp())
+            tf.addfile(tar_info, fileobj=io.BytesIO(data))
+
+        self._added_files.add(arcpath)
+
+    def add_symlink(self, arcpath: str, target: str, time: datetime.datetime, files_for_zip):
+        for zf in self._zip_files:
+            file_data_time = (time.year, time.month, time.day, time.hour, time.minute, time.second)
+            for f in files_for_zip:
+                zip_info = zipfile.ZipInfo(filename=f["arcpath"], date_time=file_data_time)
+                zip_info.external_attr = f["mode"] << 16
+                zip_info.compress_type = zipfile.ZIP_DEFLATED
+                zf.writestr(zip_info, data=f["data"])
+        for tf in self._tar_files:
+            tar_info = tarfile.TarInfo(arcpath)
+            tar_info.type = tarfile.SYMTYPE
+            tar_info.mode = 0o777
+            tar_info.mtime = int(time.timestamp())
+            tar_info.linkname = target
+            tf.addfile(tar_info)
+
+        self._added_files.update(f["arcpath"] for f in files_for_zip)
+
+    def add_git_hash(self, commit: str, arcdir: typing.Optional[str]=None, time: typing.Optional[datetime.datetime]=None):
+        arcpath = GIT_HASH_FILENAME
+        if arcdir and arcdir[-1:] != "/":
+            arcpath = f"{arcdir}/{arcpath}"
+        if not time:
+            time = datetime.datetime(year=2024, month=4, day=1)
+        data = f"{commit}\n".encode()
+        self.add_file_data(arcpath=arcpath, data=data, mode=0o100644, time=time)
+
+    def add_file_path(self, arcpath: str, path: Path):
+        assert path.is_file(), f"{path} should be a file"
+        for zf in self._zip_files:
+            zf.write(path, arcname=arcpath)
+        for tf in self._tar_files:
+            tf.add(path, arcname=arcpath)
+
+    def add_file_directory(self, arcdirpath: str, dirpath: Path):
+        assert dirpath.is_dir()
+        if arcdirpath and arcdirpath[-1:] != "/":
+            arcdirpath += "/"
+        for f in dirpath.iterdir():
+            if f.is_file():
+                arcpath = f"{arcdirpath}{f.name}"
+                logger.debug("Adding %s to %s", f, arcpath)
+                self.add_file_path(arcpath=arcpath, path=f)
+
+    def close(self):
+        # Archiver is intentionally made invalid after this function
+        for zip_file in self._zip_files:
+            zip_file.close()
+        self._zip_files = None
+        for tar_file in self._tar_files:
+            tar_file.close()
+        self._tar_files = None
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, type, value, traceback):
+        self.close()
+
+
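A minimal usage sketch of `Archiver` (file names invented, class assumed in scope): a single `add_file_data()` call fans the same bytes, mode and mtime out to every archive that was opened, which is what keeps the .zip/.tar.gz/.tar.xz artifacts in sync.

```python
import datetime
from pathlib import Path

when = datetime.datetime(2024, 4, 1)
with Archiver(zip_path=Path("demo.zip"), tgz_path=Path("demo.tar.gz")) as archiver:
    # Same content lands in both demo.zip and demo.tar.gz.
    archiver.add_file_data(arcpath="demo-1.0/VERSION.txt", data=b"1.0\n", mode=0o100644, time=when)
    archiver.add_git_hash(commit="deadbeef", arcdir="demo-1.0")  # writes demo-1.0/.git-hash
    assert "demo-1.0/VERSION.txt" in archiver.added_files
```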
+class SourceCollector:
+    TreeItem = collections.namedtuple("TreeItem", ("path", "mode", "data", "symtarget", "directory", "time"))
+
+    def __init__(self, root: Path, commit: str, filter: typing.Optional[Callable[[str], bool]], executer: Executer):
         self.root = root
         self.commit = commit
-        self.dist_path = dist_path
-        self.section_printer = section_printer
+        self.filter = filter
         self.executer = executer
-        self.cmake_generator = cmake_generator
+        self._git_contents: typing.Optional[dict[str, SourceCollector.TreeItem]] = None

-        self.artifacts: dict[str, Path] = {}
+    def _get_git_contents(self) -> dict[str, TreeItem]:
+        contents_tgz = subprocess.check_output(["git", "archive", "--format=tar.gz", self.commit, "-o", "/dev/stdout"], cwd=self.root, text=False)
+        tar_archive = tarfile.open(fileobj=io.BytesIO(contents_tgz), mode="r:gz")
+        filenames = tuple(m.name for m in tar_archive if (m.isfile() or m.issym()))

-    @property
-    def dry(self) -> bool:
-        return self.executer.dry
+        file_times = self._get_file_times(paths=filenames)
+        git_contents = {}
+        for ti in tar_archive:
+            if self.filter and not self.filter(ti.name):
+                continue
+            data = None
+            symtarget = None
+            directory = False
+            file_time = None
+            if ti.isfile():
+                contents_file = tar_archive.extractfile(ti.name)
+                data = contents_file.read()
+                file_time = file_times[ti.name]
+            elif ti.issym():
+                symtarget = ti.linkname
+                file_time = file_times[ti.name]
+            elif ti.isdir():
+                directory = True
+            else:
+                raise ValueError(f"{ti.name}: unknown type")
+            git_contents[ti.name] = self.TreeItem(path=ti.name, mode=ti.mode, data=data, symtarget=symtarget, directory=directory, time=file_time)
+        return git_contents

-    def prepare(self):
-        logger.debug("Creating dist folder")
-        self.dist_path.mkdir(parents=True, exist_ok=True)
+    @property
+    def git_contents(self) -> dict[str, TreeItem]:
+        if self._git_contents is None:
+            self._git_contents = self._get_git_contents()
+        return self._git_contents

-    TreeItem = collections.namedtuple("TreeItem", ("path", "mode", "data", "time"))
     def _get_file_times(self, paths: tuple[str, ...]) -> dict[str, datetime.datetime]:
         dry_out = textwrap.dedent("""\
             time=2024-03-14T15:40:25-07:00

             M\tCMakeLists.txt
         """)
-        git_log_out = self.executer.run(["git", "log", "--name-status", '--pretty=time=%cI', self.commit], stdout=True, dry_out=dry_out).stdout.splitlines(keepends=False)
+        git_log_out = self.executer.check_output(["git", "log", "--name-status", '--pretty=time=%cI', self.commit], dry_out=dry_out, cwd=self.root).splitlines(keepends=False)
         current_time = None
         set_paths = set(paths)
         path_times: dict[str, datetime.datetime] = {}
@@ -175,98 +325,191 @@ class Releaser:
             if not line:
                 continue
             if line.startswith("time="):
-                current_time = datetime.datetime.fromisoformat(line.removeprefix("time="))
+                current_time = safe_isotime_to_datetime(line.removeprefix("time="))
                 continue
             mod_type, file_paths = line.split(maxsplit=1)
             assert current_time is not None
             for file_path in file_paths.split("\t"):
                 if file_path in set_paths and file_path not in path_times:
                     path_times[file_path] = current_time
-        assert set(path_times.keys()) == set_paths
+
+        # FIXME: find out why some files are not shown in "git log"
+        # assert set(path_times.keys()) == set_paths
+        if set(path_times.keys()) != set_paths:
+            found_times = set(path_times.keys())
+            paths_without_times = set_paths.difference(found_times)
+            logger.warning("No times found for these paths: %s", paths_without_times)
+            max_time = max(time for time in path_times.values())
+            for path in paths_without_times:
+                path_times[path] = max_time
+
         return path_times

-    @staticmethod
-    def _path_filter(path: str):
+    def add_to_archiver(self, archive_base: str, archiver: Archiver):
+        remaining_symlinks = set()
+        added_files = dict()
+
+        def calculate_symlink_target(s: SourceCollector.TreeItem) -> str:
+            dest_dir = os.path.dirname(s.path)
+            if dest_dir:
+                dest_dir += "/"
+            target = dest_dir + s.symtarget
+            while True:
+                new_target, n = re.subn(r"([^/]+/+[.]{2}/)", "", target)
+                target = new_target
+                if not n:
+                    break
+            return target
+
+        # Add files in first pass
+        for git_file in self.git_contents.values():
+            if git_file.data is not None:
+                archiver.add_file_data(arcpath=f"{archive_base}/{git_file.path}", data=git_file.data, time=git_file.time, mode=git_file.mode)
+                added_files[git_file.path] = git_file
+            elif git_file.symtarget is not None:
+                remaining_symlinks.add(git_file)
+
+        # Resolve symlinks in second pass: zipfile does not support symlinks, so add files to zip archive
+        while True:
+            if not remaining_symlinks:
+                break
+            symlinks_this_time = set()
+            extra_added_files = {}
+            for symlink in remaining_symlinks:
+                symlink_files_for_zip = {}
+                symlink_target_path = calculate_symlink_target(symlink)
+                if symlink_target_path in added_files:
+                    symlink_files_for_zip[symlink.path] = added_files[symlink_target_path]
+                else:
+                    symlink_target_path_slash = symlink_target_path + "/"
+                    for added_file in added_files:
+                        if added_file.startswith(symlink_target_path_slash):
+                            path_in_symlink = symlink.path + "/" + added_file.removeprefix(symlink_target_path_slash)
+                            symlink_files_for_zip[path_in_symlink] = added_files[added_file]
+                if symlink_files_for_zip:
+                    symlinks_this_time.add(symlink)
+                    extra_added_files.update(symlink_files_for_zip)
+                    files_for_zip = [{"arcpath": f"{archive_base}/{sym_path}", "data": sym_info.data, "mode": sym_info.mode} for sym_path, sym_info in symlink_files_for_zip.items()]
+                    archiver.add_symlink(arcpath=f"{archive_base}/{symlink.path}", target=symlink.symtarget, time=symlink.time, files_for_zip=files_for_zip)
+            # if not symlinks_this_time:
+            #     logger.info("files added: %r", set(path for path in added_files.keys()))
+            assert symlinks_this_time, f"No targets found for symlinks: {remaining_symlinks}"
+            remaining_symlinks.difference_update(symlinks_this_time)
+            added_files.update(extra_added_files)
+
+
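The regex in `calculate_symlink_target` removes one `dir/../` segment per `re.subn` pass, so looping until zero substitutions fully normalizes a relative link target. A standalone check (paths invented):

```python
import re

def normalize(target: str) -> str:
    # One "<component>/../" segment is collapsed per pass, as in the diff.
    while True:
        target, n = re.subn(r"([^/]+/+[.]{2}/)", "", target)
        if not n:
            return target

# First pass collapses "SDL3/../", second pass "include/../".
assert normalize("include/SDL3/../../LICENSE.txt") == "LICENSE.txt"
```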
+class Releaser:
+    def __init__(self, release_info: dict, commit: str, root: Path, dist_path: Path, section_printer: SectionPrinter, executer: Executer, cmake_generator: str, deps_path: Path, overwrite: bool, github: bool, fast: bool):
+        self.release_info = release_info
+        self.project = release_info["name"]
+        self.version = self.extract_sdl_version(root=root, release_info=release_info)
+        self.root = root
+        self.commit = commit
+        self.dist_path = dist_path
+        self.section_printer = section_printer
+        self.executer = executer
+        self.cmake_generator = cmake_generator
+        self.cpu_count = multiprocessing.cpu_count()
+        self.deps_path = deps_path
+        self.overwrite = overwrite
+        self.github = github
+        self.fast = fast
+
+        self.artifacts: dict[str, Path] = {}
+
+    @property
+    def dry(self) -> bool:
+        return self.executer.dry
+
+    def prepare(self):
+        logger.debug("Creating dist folder")
+        self.dist_path.mkdir(parents=True, exist_ok=True)
+
+    @classmethod
+    def _path_filter(cls, path: str) -> bool:
+        if ".gitmodules" in path:
+            return True
         if path.startswith(".git"):
             return False
         return True

-    def _get_git_contents(self) -> dict[str, TreeItem]:
-        contents_tgz = subprocess.check_output(["git", "archive", "--format=tar.gz", self.commit, "-o", "/dev/stdout"], text=False)
-        contents = tarfile.open(fileobj=io.BytesIO(contents_tgz), mode="r:gz")
-        filenames = tuple(m.name for m in contents if m.isfile())
-        assert "src/SDL.c" in filenames
-        assert "include/SDL.h" in filenames
-        file_times = self._get_file_times(filenames)
-        git_contents = {}
-        for ti in contents:
-            if not ti.isfile():
-                continue
-            if not self._path_filter(ti.name):
-                continue
-            contents_file = contents.extractfile(ti.name)
-            assert contents_file, f"{ti.name} is not a file"
-            git_contents[ti.name] = self.TreeItem(path=ti.name, mode=ti.mode, data=contents_file.read(), time=file_times[ti.name])
-        return git_contents
+    @classmethod
+    def _external_repo_path_filter(cls, path: str) -> bool:
+        if not cls._path_filter(path):
+            return False
+        if path.startswith("test/") or path.startswith("tests/"):
+            return False
+        return True

     def create_source_archives(self) -> None:
         archive_base = f"{self.project}-{self.version}"

-        git_contents = self._get_git_contents()
-        git_files = list(git_contents.values())
-        assert len(git_contents) == len(git_files)
-
-        latest_mod_time = max(item.time for item in git_files)
+        project_source_collector = SourceCollector(root=self.root, commit=self.commit, executer=self.executer, filter=self._path_filter)

-        git_files.append(self.TreeItem(path="VERSION.txt", data=f"{self.version}\n".encode(), mode=0o100644, time=latest_mod_time))
-        git_files.append(self.TreeItem(path=GIT_HASH_FILENAME, data=f"{self.commit}\n".encode(), mode=0o100644, time=latest_mod_time))
-
-        git_files.sort(key=lambda v: v.time)
+        latest_mod_time = max(item.time for item in project_source_collector.git_contents.values() if item.time)

         zip_path = self.dist_path / f"{archive_base}.zip"
-        logger.info("Creating .zip source archive (%s)...", zip_path)
+        tgz_path = self.dist_path / f"{archive_base}.tar.gz"
+        txz_path = self.dist_path / f"{archive_base}.tar.xz"
+
+        logger.info("Creating zip/tgz/txz source archives ...")
         if self.dry:
             zip_path.touch()
+            tgz_path.touch()
+            txz_path.touch()
         else:
-            with zipfile.ZipFile(zip_path, "w", compression=zipfile.ZIP_DEFLATED) as zip_object:
-                for git_file in git_files:
-                    file_data_time = (git_file.time.year, git_file.time.month, git_file.time.day, git_file.time.hour, git_file.time.minute, git_file.time.second)
-                    zip_info = zipfile.ZipInfo(filename=f"{archive_base}/{git_file.path}", date_time=file_data_time)
-                    zip_info.external_attr = git_file.mode << 16
-                    zip_info.compress_type = zipfile.ZIP_DEFLATED
-                    zip_object.writestr(zip_info, data=git_file.data)
-        self.artifacts["src-zip"] = zip_path
+            with Archiver(zip_path=zip_path, tgz_path=tgz_path, txz_path=txz_path) as archiver:
+                archiver.add_file_data(arcpath=f"{archive_base}/VERSION.txt", data=f"{self.version}\n".encode(), mode=0o100644, time=latest_mod_time)
+                archiver.add_file_data(arcpath=f"{archive_base}/{GIT_HASH_FILENAME}", data=f"{self.commit}\n".encode(), mode=0o100644, time=latest_mod_time)

-        tar_types = (
-            (".tar.gz", "gz"),
-            (".tar.xz", "xz"),
-        )
-        for ext, comp in tar_types:
-            tar_path = self.dist_path / f"{archive_base}{ext}"
-            logger.info("Creating %s source archive (%s)...", ext, tar_path)
-            if self.dry:
-                tar_path.touch()
-            else:
-                with tarfile.open(tar_path, f"w:{comp}") as tar_object:
-                    for git_file in git_files:
-                        tar_info = tarfile.TarInfo(f"{archive_base}/{git_file.path}")
-                        tar_info.mode = git_file.mode
-                        tar_info.size = len(git_file.data)
-                        tar_info.mtime = git_file.time.timestamp()
-                        tar_object.addfile(tar_info, fileobj=io.BytesIO(git_file.data))
-
-                if tar_path.suffix == ".gz":
-                    # Zero the embedded timestamp in the gzip'ed tarball
-                    with open(tar_path, "r+b") as f:
-                        f.seek(4, 0)
-                        f.write(b"\x00\x00\x00\x00")
+                print("Adding source files of main project ...")
+                project_source_collector.add_to_archiver(archive_base=archive_base, archiver=archiver)
+
+                for extra_repo in self.release_info["source"].get("extra-repos", []):
+                    extra_repo_root = self.root / extra_repo
+                    assert (extra_repo_root / ".git").exists(), f"{extra_repo_root} must be a git repo"
+                    extra_repo_commit = self.executer.check_output(["git", "rev-parse", "HEAD"], dry_out=f"gitsha-extra-repo-{extra_repo}", cwd=extra_repo_root).strip()
+                    extra_repo_source_collector = SourceCollector(root=extra_repo_root, commit=extra_repo_commit, executer=self.executer, filter=self._external_repo_path_filter)
+                    print(f"Adding source files of {extra_repo} ...")
+                    extra_repo_source_collector.add_to_archiver(archive_base=f"{archive_base}/{extra_repo}", archiver=archiver)
+
+            for file in self.release_info["source"]["checks"]:
+                assert f"{archive_base}/{file}" in archiver.added_files, f"'{archive_base}/{file}' must exist"

-            self.artifacts[f"src-tar-{comp}"] = tar_path
+        logger.info("... done")

-    def create_framework(self, configuration: str="Release") -> None:
-        dmg_in = self.root / f"Xcode/SDL/build/{self.project}.dmg"
+        self.artifacts["src-zip"] = zip_path
+        self.artifacts["src-tar-gz"] = tgz_path
+        self.artifacts["src-tar-xz"] = txz_path
+
+        if not self.dry:
+            with tgz_path.open("r+b") as f:
+                # Zero the embedded timestamp in the gzip'ed tarball
+                f.seek(4, 0)
+                f.write(b"\x00\x00\x00\x00")
+
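Bytes 4 through 7 of a gzip stream are its little-endian MTIME header field, so the four-byte overwrite above is what makes rebuilding the same tree produce a byte-identical .tar.gz. A standalone check (file name invented):

```python
import gzip
import struct

with open("demo.tar.gz", "wb") as f:           # hypothetical archive
    f.write(gzip.compress(b"payload"))         # gzip stamps the current time

with open("demo.tar.gz", "r+b") as f:          # the same trick as above
    f.seek(4, 0)
    f.write(b"\x00\x00\x00\x00")

with open("demo.tar.gz", "rb") as f:
    data = f.read()
assert struct.unpack("<I", data[4:8])[0] == 0  # MTIME field zeroed
assert gzip.decompress(data) == b"payload"     # contents unharmed
```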
+    def create_dmg(self, configuration: str="Release") -> None:
+        dmg_in = self.root / self.release_info["dmg"]["path"]
+        xcode_project = self.root / self.release_info["dmg"]["project"]
+        assert xcode_project.is_dir(), f"{xcode_project} must be a directory"
+        assert (xcode_project / "project.pbxproj").is_file(), f"{xcode_project} must contain project.pbxproj"
         dmg_in.unlink(missing_ok=True)
-        self.executer.run(["xcodebuild", "-project", str(self.root / "Xcode/SDL/SDL.xcodeproj"), "-target", "Standard DMG", "-configuration", configuration])
+        build_xcconfig = self.release_info["dmg"].get("build-xcconfig")
+        if build_xcconfig:
+            shutil.copy(self.root / build_xcconfig, xcode_project.parent / "build.xcconfig")
+
+        xcode_scheme = self.release_info["dmg"].get("scheme")
+        xcode_target = self.release_info["dmg"].get("target")
+        assert xcode_scheme or xcode_target, "dmg needs scheme or target"
+        assert not (xcode_scheme and xcode_target), "dmg cannot have both scheme and target set"
+        if xcode_scheme:
+            scheme_or_target = "-scheme"
+            target_like = xcode_scheme
+        else:
+            scheme_or_target = "-target"
+            target_like = xcode_target
+        self.executer.run(["xcodebuild", "ONLY_ACTIVE_ARCH=NO", "-project", xcode_project, scheme_or_target, target_like, "-configuration", configuration])
         if self.dry:
             dmg_in.parent.mkdir(parents=True, exist_ok=True)
             dmg_in.touch()
@@ -294,225 +537,316 @@ class Releaser:
             tar_info.mtime = int(time.timestamp())
             tar_object.addfile(tar_info, fileobj=io.BytesIO(self.git_hash_data))

-    def _zip_add_git_hash(self, zip_file: zipfile.ZipFile, root: typing.Optional[str]=None, time: typing.Optional[datetime.datetime]=None):
-        if not time:
-            time = datetime.datetime(year=2024, month=4, day=1)
-        path = GIT_HASH_FILENAME
-        if root:
-            path = f"{root}/{path}"
-
-        file_data_time = (time.year, time.month, time.day, time.hour, time.minute, time.second)
-        zip_info = zipfile.ZipInfo(filename=path, date_time=file_data_time)
-        zip_info.external_attr = 0o100644 << 16
-        zip_info.compress_type = zipfile.ZIP_DEFLATED
-        zip_file.writestr(zip_info, data=self.git_hash_data)
-
     def create_mingw_archives(self) -> None:
         build_type = "Release"
-        mingw_archs = ("i686", "x86_64")
         build_parent_dir = self.root / "build-mingw"
-
-        zip_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.zip"
-        tar_exts = ("gz", "xz")
-        tar_paths = { ext: self.dist_path / f"{self.project}-devel-{self.version}-mingw.tar.{ext}" for ext in tar_exts}
+        assert "autotools" in self.release_info["mingw"]
+        assert "cmake" not in self.release_info["mingw"]
+        mingw_archs = self.release_info["mingw"]["autotools"]["archs"]
+        ARCH_TO_TRIPLET = {
+            "x86": "i686-w64-mingw32",
+            "x64": "x86_64-w64-mingw32",
+        }
+
+        new_env = dict(os.environ)
+
+        if "dependencies" in self.release_info["mingw"]:
+            mingw_deps_path = self.deps_path / "mingw-deps"
+            shutil.rmtree(mingw_deps_path, ignore_errors=True)
+            mingw_deps_path.mkdir()
+
+            for triplet in ARCH_TO_TRIPLET.values():
+                (mingw_deps_path / triplet).mkdir()
+
+            def extract_filter(member: tarfile.TarInfo, path: str, /):
+                if member.name.startswith("SDL"):
+                    member.name = "/".join(Path(member.name).parts[1:])
+                return member
+            for dep in self.release_info["dependencies"].keys():
+                extract_dir = mingw_deps_path / f"extract-{dep}"
+                extract_dir.mkdir()
+                with chdir(extract_dir):
+                    tar_path = glob.glob(self.release_info["mingw"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)[0]
+                    logger.info("Extracting %s to %s", tar_path, mingw_deps_path)
+                    with tarfile.open(self.deps_path / tar_path, mode="r:gz") as tarf:
+                        tarf.extractall(filter=extract_filter)
+                for triplet in ARCH_TO_TRIPLET.values():
+                    self.executer.run(["make", f"-j{os.cpu_count()}", "-C", str(extract_dir), "install-package", f"arch={triplet}", f"prefix={str(mingw_deps_path / triplet)}"])
+
+                    dep_binpath = mingw_deps_path / triplet / "bin"
+                    assert dep_binpath.is_dir(), f"{dep_binpath} for PATH should exist"
+                    dep_pkgconfig = mingw_deps_path / triplet / "lib/pkgconfig"
+                    assert dep_pkgconfig.is_dir(), f"{dep_pkgconfig} for PKG_CONFIG_PATH should exist"
+
+                    new_env["PATH"] = os.pathsep.join([str(dep_binpath), new_env["PATH"]])
+                    new_env["PKG_CONFIG_PATH"] = str(dep_pkgconfig)
+
+        new_env["CFLAGS"] = f"-O2 -ffile-prefix-map={self.root}=/src/{self.project}"
+        new_env["CXXFLAGS"] = f"-O2 -ffile-prefix-map={self.root}=/src/{self.project}"

         arch_install_paths = {}
         arch_files = {}
-
         for arch in mingw_archs:
-            build_path = build_parent_dir / f"build-{arch}"
-            install_path = build_parent_dir / f"install-{arch}"
+            triplet = ARCH_TO_TRIPLET[arch]
+            new_env["CC"] = f"{triplet}-gcc"
+            new_env["CXX"] = f"{triplet}-g++"
+            new_env["RC"] = f"{triplet}-windres"
+
+            build_path = build_parent_dir / f"build-{triplet}"
+            install_path = build_parent_dir / f"install-{triplet}"
             arch_install_paths[arch] = install_path
             shutil.rmtree(install_path, ignore_errors=True)
             build_path.mkdir(parents=True, exist_ok=True)
-            with self.section_printer.group(f"Configuring MinGW {arch}"):
+            with self.section_printer.group(f"Configuring MinGW {triplet}"):
+                extra_args = [arg.replace("@DEP_PREFIX@", str(mingw_deps_path / triplet)) for arg in self.release_info["mingw"]["autotools"]["args"]]
+                assert "@" not in " ".join(extra_args), f"@ should not be present in extra arguments ({extra_args})"
                 self.executer.run([
-                    "cmake", "-S", str(self.root), "-B", str(build_path),
-                    "--fresh",
-                    f'''-DCMAKE_C_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''',
-                    f'''-DCMAKE_CXX_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''',
-                    "-DSDL_SHARED=ON",
-                    "-DSDL_STATIC=ON",
-                    "-DSDL_DISABLE_INSTALL_DOCS=ON",
-                    "-DSDL_TEST_LIBRARY=ON",
-                    "-DSDL_TESTS=OFF",
-                    "-DCMAKE_INSTALL_BINDIR=bin",
-                    "-DCMAKE_INSTALL_DATAROOTDIR=share",
-                    "-DCMAKE_INSTALL_INCLUDEDIR=include",
-                    "-DCMAKE_INSTALL_LIBDIR=lib",
-                    f"-DCMAKE_BUILD_TYPE={build_type}",
-                    f"-DCMAKE_TOOLCHAIN_FILE={self.root}/build-scripts/cmake-toolchain-mingw64-{arch}.cmake",
-                    f"-G{self.cmake_generator}",
-                    f"-DCMAKE_INSTALL_PREFIX={install_path}",
-                ])
-            with self.section_printer.group(f"Build MinGW {arch}"):
-                self.executer.run(["cmake", "--build", str(build_path), "--verbose", "--config", build_type])
-            with self.section_printer.group(f"Install MinGW {arch}"):
-                self.executer.run(["cmake", "--install", str(build_path), "--strip", "--config", build_type])
+                    self.root / "configure",
+                    f"--prefix={install_path}",
+                    f"--includedir={install_path}/include",
+                    f"--libdir={install_path}/lib",
+                    f"--bindir={install_path}/bin",
+                    f"--host={triplet}",
+                    "--build=x86_64-none-linux-gnu",
+                ] + extra_args, cwd=build_path, env=new_env)
+            with self.section_printer.group(f"Build MinGW {triplet}"):
+                self.executer.run(["make", f"-j{self.cpu_count}"], cwd=build_path, env=new_env)
+            with self.section_printer.group(f"Install MinGW {triplet}"):
+                self.executer.run(["make", "install"], cwd=build_path, env=new_env)
             arch_files[arch] = list(Path(r) / f for r, _, files in os.walk(install_path) for f in files)

-        extra_files = (
-            ("mingw/pkg-support/INSTALL.txt", ""),
-            ("mingw/pkg-support/Makefile", ""),
-            ("mingw/pkg-support/cmake/sdl2-config.cmake", "cmake/"),
-            ("mingw/pkg-support/cmake/sdl2-config-version.cmake", "cmake/"),
-            ("BUGS.txt", ""),
-            ("CREDITS.txt", ""),
-            ("README-SDL.txt", ""),
-            ("WhatsNew.txt", ""),
-            ("LICENSE.txt", ""),
-            ("README.md", ""),
-            ("docs/*.md", "docs/"),
-        )
-        test_files = list(Path(r) / f for r, _, files in os.walk(self.root / "test") for f in files)
-
-        # FIXME: split SDL2.dll debug information into debug library
-        # objcopy --only-keep-debug SDL2.dll SDL2.debug.dll
-        # objcopy --add-gnu-debuglink=SDL2.debug.dll SDL2.dll
-        # objcopy --strip-debug SDL2.dll
-
-        for comp in tar_exts:
-            logger.info("Creating %s...", tar_paths[comp])
-            with tarfile.open(tar_paths[comp], f"w:{comp}") as tar_object:
-                arc_root = f"{self.project}-{self.version}"
-                for file_path_glob, arcdirname in extra_files:
-                    assert not arcdirname or arcdirname[-1] == "/"
-                    for file_path in glob.glob(file_path_glob, root_dir=self.root):
-                        arcname = f"{arc_root}/{arcdirname}{Path(file_path).name}"
-                        tar_object.add(self.root / file_path, arcname=arcname)
-                for arch in mingw_archs:
-                    install_path = arch_install_paths[arch]
-                    arcname_parent = f"{arc_root}/{arch}-w64-mingw32"
-                    for file in arch_files[arch]:
-                        arcname = os.path.join(arcname_parent, file.relative_to(install_path))
-                        tar_object.add(file, arcname=arcname)
-                for test_file in test_files:
-                    arcname = f"{arc_root}/test/{test_file.relative_to(self.root/'test')}"
-                    tar_object.add(test_file, arcname=arcname)
-                self._tar_add_git_hash(tar_object=tar_object, root=arc_root)
-
-            self.artifacts[f"mingw-devel-tar-{comp}"] = tar_paths[comp]
-
-    def build_vs(self, arch: str, platform: str, vs: VisualStudio, configuration: str="Release") -> VcArchDevel:
-        dll_path = self.root / f"VisualC/SDL/{platform}/{configuration}/{self.project}.dll"
-        pdb_path = self.root / f"VisualC/SDL/{platform}/{configuration}/{self.project}.pdb"
-        imp_path = self.root / f"VisualC/SDL/{platform}/{configuration}/{self.project}.lib"
-        test_path = self.root / f"VisualC/SDLtest/{platform}/{configuration}/{self.project}test.lib"
-        main_path = self.root / f"VisualC/SDLmain/{platform}/{configuration}/{self.project}main.lib"
-
-        dll_path.unlink(missing_ok=True)
-        pdb_path.unlink(missing_ok=True)
-        imp_path.unlink(missing_ok=True)
-        test_path.unlink(missing_ok=True)
-        main_path.unlink(missing_ok=True)
-
-        projects = [
-            self.root / "VisualC/SDL/SDL.vcxproj",
-            self.root / "VisualC/SDLmain/SDLmain.vcxproj",
-            self.root / "VisualC/SDLtest/SDLtest.vcxproj",
+        print("Collecting files for MinGW development archive ...")
+        archived_files = {}
+        arc_root = f"{self.project}-{self.version}"
+        for arch in mingw_archs:
+            triplet = ARCH_TO_TRIPLET[arch]
+            install_path = arch_install_paths[arch]
+            arcname_parent = f"{arc_root}/{triplet}"
+            for file in arch_files[arch]:
+                arcname = os.path.join(arcname_parent, file.relative_to(install_path))
+                logger.debug("Adding %s as %s", file, arcname)
+                archived_files[arcname] = file
+        for meta_destdir, file_globs in self.release_info["mingw"]["files"].items():
+            assert meta_destdir[0] == "/" and meta_destdir[-1] == "/", f"'{meta_destdir}' must begin and end with '/'"
+            if "@" in meta_destdir:
+                destdirs = list(meta_destdir.replace("@TRIPLET@", triplet) for triplet in ARCH_TO_TRIPLET.values())
+                assert not any("@" in d for d in destdirs)
+            else:
+                destdirs = [meta_destdir]
+
+            assert isinstance(file_globs, list), f"'{file_globs}' in release-info.json must be a list of globs"
+            for file_glob in file_globs:
+                file_paths = glob.glob(file_glob, root_dir=self.root)
+                assert file_paths, f"glob '{file_glob}' does not match any file"
+                for file_path in file_paths:
+                    file_path = self.root / file_path
+                    for destdir in destdirs:
+                        arcname = f"{arc_root}{destdir}{file_path.name}"
+                        logger.debug("Adding %s as %s", file_path, arcname)
+                        archived_files[arcname] = file_path
+        print("... done")
+
+        print("Creating zip/tgz/txz development archives ...")
+        zip_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.zip"
+        tgz_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.tar.gz"
+        txz_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.tar.xz"
+        with Archiver(zip_path=zip_path, tgz_path=tgz_path, txz_path=txz_path) as archiver:
+            for arcpath, path in archived_files.items():
+                archiver.add_file_path(arcpath=arcpath, path=path)
+        print("... done")
+
+        self.artifacts["mingw-devel-zip"] = zip_path
+        self.artifacts["mingw-devel-tar-gz"] = tgz_path
+        self.artifacts["mingw-devel-tar-xz"] = txz_path
+
+    def download_dependencies(self):
+        shutil.rmtree(self.deps_path, ignore_errors=True)
+        self.deps_path.mkdir(parents=True)
+
+        if self.github:
+            with open(os.environ["GITHUB_OUTPUT"], "a") as f:
+                f.write(f"dep-path={self.deps_path.absolute()}\n")
+
+        for dep, depinfo in self.release_info["dependencies"].items():
+            startswith = depinfo["startswith"]
+            dep_repo = depinfo["repo"]
+            dep_string_data = self.executer.check_output(["gh", "-R", dep_repo, "release", "list", "--exclude-drafts", "--exclude-pre-releases", "--json", "name,createdAt,tagName", "--jq", f'[.[]|select(.name|startswith("{startswith}"))]|max_by(.createdAt)']).strip()
+            dep_data = json.loads(dep_string_data)
+            dep_tag = dep_data["tagName"]
+            dep_version = dep_data["name"]
+            logger.info("Download dependency %s version %s (tag=%s)", dep, dep_version, dep_tag)
+            self.executer.run(["gh", "-R", dep_repo, "release", "download", dep_tag], cwd=self.deps_path)
+            if self.github:
+                with open(os.environ["GITHUB_OUTPUT"], "a") as f:
+                    f.write(f"dep-{dep.lower()}-version={dep_version}\n")
+
+    def verify_dependencies(self):
+        for dep, depinfo in self.release_info.get("dependencies", {}).items():
+            mingw_matches = glob.glob(self.release_info["mingw"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)
+            assert len(mingw_matches) == 1, f"Exactly one archive matches mingw {dep} dependency: {mingw_matches}"
+            dmg_matches = glob.glob(self.release_info["dmg"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)
+            assert len(dmg_matches) == 1, f"Exactly one archive matches dmg {dep} dependency: {dmg_matches}"
+            msvc_matches = glob.glob(self.release_info["msvc"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)
+            assert len(msvc_matches) == 1, f"Exactly one archive matches msvc {dep} dependency: {msvc_matches}"
+
+    def build_vs(self, arch_platform: VsArchPlatformConfig, vs: VisualStudio):
+        msvc_deps_path = self.deps_path / "msvc-deps"
+        shutil.rmtree(msvc_deps_path, ignore_errors=True)
+        if "dependencies" in self.release_info["msvc"]:
+            for dep, depinfo in self.release_info["msvc"]["dependencies"].items():
+                msvc_zip = self.deps_path / glob.glob(depinfo["artifact"], root_dir=self.deps_path)[0]
+
+                src_globs = [arch_platform.configure(instr["src"]) for instr in depinfo["copy"]]
+                with zipfile.ZipFile(msvc_zip, "r") as zf:
+                    for member in zf.namelist():
+                        member_path = "/".join(Path(member).parts[1:])
+                        for src_i, src_glob in enumerate(src_globs):
+                            if fnmatch.fnmatch(member_path, src_glob):
+                                dst = (self.root / arch_platform.configure(depinfo["copy"][src_i]["dst"])).resolve() / Path(member_path).name
+                                zip_data = zf.read(member)
+                                if dst.exists():
+                                    identical = False
+                                    if dst.is_file():
+                                        orig_bytes = dst.read_bytes()
+                                        if orig_bytes == zip_data:
+                                            identical = True
+                                    if not identical:
+                                        logger.warning("Extracting dependency %s, will cause %s to be overwritten", dep, dst)
+                                        if not self.overwrite:
+                                            raise RuntimeError("Run with --overwrite to allow overwriting")
+                                logger.debug("Extracting %s -> %s", member, dst)
+
+                                dst.parent.mkdir(exist_ok=True, parents=True)
+                                dst.write_bytes(zip_data)
+
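How the copy loop above matches zip members: the archive's top-level folder is stripped before `fnmatch`, so the `src` globs in release-info.json are written relative to the package root. A standalone illustration (member names and glob invented):

```python
import fnmatch
from pathlib import Path

members = ["SDL2-2.30.0/lib/x64/SDL2.dll", "SDL2-2.30.0/include/SDL.h"]
src_glob = "lib/x64/*.dll"  # what arch_platform.configure() might produce
for member in members:
    member_path = "/".join(Path(member).parts[1:])  # drop the top-level dir
    if fnmatch.fnmatch(member_path, src_glob):
        print("would copy:", member_path)
# would copy: lib/x64/SDL2.dll
```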
+        assert "msbuild" in self.release_info["msvc"]
+        assert "cmake" not in self.release_info["msvc"]
+        built_paths = [
+            self.root / arch_platform.configure(f) for msbuild_files in self.release_info["msvc"]["msbuild"]["files"] for f in msbuild_files["paths"]
         ]

-        with self.section_printer.group(f"Build {arch} VS binary"):
-            vs.build(arch=arch, platform=platform, configuration=configuration, projects=projects)
+        for b in built_paths:
+            b.unlink(missing_ok=True)
+
+        projects = self.release_info["msvc"]["msbuild"]["projects"]
+
+        with self.section_printer.group(f"Build {arch_platform.arch} VS binary"):
+            vs.build(arch_platform=arch_platform, projects=projects)

         if self.dry:
-            dll_path.parent.mkdir(parents=True, exist_ok=True)
-            dll_path.touch()
-            pdb_path.touch()
-            imp_path.touch()
-            main_path.parent.mkdir(parents=True, exist_ok=True)
-            main_path.touch()
-            test_path.parent.mkdir(parents=True, exist_ok=True)
-            test_path.touch()
-
-        assert dll_path.is_file(), f"{self.project}.dll has not been created"
-        assert pdb_path.is_file(), f"{self.project}.pdb has not been created"
-        assert imp_path.is_file(), f"{self.project}.lib has not been created"
-        assert main_path.is_file(), f"{self.project}main.lib has not been created"
-        assert test_path.is_file(), f"{self.project}test.lib has not been created"
-
-        zip_path = self.dist_path / f"{self.project}-{self.version}-win32-{arch}.zip"
-        zip_path.unlink(missing_ok=True)
-        logger.info("Creating %s", zip_path)
-        with zipfile.ZipFile(zip_path, mode="w", compression=zipfile.ZIP_DEFLATED) as zf:
-            logger.debug("Adding %s", dll_path.name)
-            zf.write(dll_path, arcname=dll_path.name)
-            logger.debug("Adding %s", "README-SDL.txt")
-            zf.write(self.root / "README-SDL.txt", arcname="README-SDL.txt")
-            self._zip_add_git_hash(zip_file=zf)
-        self.artifacts[f"VC-{arch}"] = zip_path
+            for b in built_paths:
+                b.parent.mkdir(parents=True, exist_ok=True)
+                b.touch()

-        return VcArchDevel(dll=dll_path, pdb=pdb_path, imp=imp_path, main=main_path, test=test_path)
+        for b in built_paths:
+            assert b.is_file(), f"{b} has not been created"

+        zip_path = self.dist_path / f"{self.project}-{self.version}-win32-{arch_platform.arch}.zip"
+        zip_path.unlink(missing_ok=True)
+        logger.info("Creating %s", zip_path)

-    def build_vs_devel(self, arch_vc: dict[str, VcArchDevel]) -> None:
+        with Archiver(zip_path=zip_path) as archiver:
+            for msbuild_files in self.release_info["msvc"]["msbuild"]["files"]:
+                if "lib" in msbuild_files:
+                    arcdir = arch_platform.configure(msbuild_files["lib"])
+                    for p in msbuild_files["paths"]:
+                        p = arch_platform.configure(p)
+                        archiver.add_file_path(path=self.root / p, arcpath=f"{arcdir}/{Path(p).name}")
+            for extra_files in self.release_info["msvc"]["files"]:
+                if "lib" in extra_files:
+                    arcdir = arch_platform.configure(extra_files["lib"])
+                    for p in extra_files["paths"]:
+                        p = arch_platform.configure(p)
+                        archiver.add_file_path(path=self.root / p, arcpath=f"{arcdir}/{Path(p).name}")
+
+            archiver.add_git_hash(commit=self.commit)
+        self.artifacts[f"VC-{arch_platform.arch}"] = zip_path
+
+        for p in built_paths:
+            assert p.is_file(), f"{p} should exist"
+
+    def build_vs_devel(self, arch_platforms: list[VsArchPlatformConfig]) -> None:
         zip_path = self.dist_path / f"{self.project}-devel-{self.version}-VC.zip"
         archive_prefix = f"{self.project}-{self.version}"

-        def zip_file(zf: zipfile.ZipFile, path: Path, arcrelpath: str):
-            arcname = f"{archive_prefix}/{arcrelpath}"
-            logger.debug("Adding %s to %s", path, arcname)
-            zf.write(path, arcname=arcname)
-
-        def zip_directory(zf: zipfile.ZipFile, directory: Path, arcrelpath: str):
-            for f in directory.iterdir():
-                if f.is_file():
-                    arcname = f"{archive_prefix}/{arcrelpath}/{f.name}"
-                    logger.debug("Adding %s to %s", f, arcname)
-                    zf.write(f, arcname=arcname)
-
-        with zipfile.ZipFile(zip_path, mode="w", compression=zipfile.ZIP_DEFLATED) as zf:
-            for arch, binaries in arch_vc.items():
-                zip_file(zf, path=binaries.dll, arcrelpath=f"lib/{arch}/{binaries.dll.name}")
-                zip_file(zf, path=binaries.imp, arcrelpath=f"lib/{arch}/{binaries.imp.name}")
-                zip_file(zf, path=binaries.pdb, arcrelpath=f"lib/{arch}/{binaries.pdb.name}")
-                zip_file(zf, path=binaries.main, arcrelpath=f"lib/{arch}/{binaries.main.name}")
-                zip_file(zf, path=binaries.test, arcrelpath=f"lib/{arch}/{binaries.test.name}")
-
-            zip_directory(zf, directory=self.root / "include", arcrelpath="include")
-            zip_directory(zf, directory=self.root / "docs", arcrelpath="docs")
-            zip_directory(zf, directory=self.root / "VisualC/pkg-support/cmake", arcrelpath="cmake")
-
-            for txt in ("BUGS.txt", "README-SDL.txt", "WhatsNew.txt"):
-                zip_file(zf, path=self.root / txt, arcrelpath=txt)
-            zip_file(zf, path=self.root / "LICENSE.txt", arcrelpath="COPYING.txt")
-            zip_file(zf, path=self.root / "README.md", arcrelpath="README.txt")
-
-            self._zip_add_git_hash(zip_file=zf, root=archive_prefix)
+        with Archiver(zip_path=zip_path) as archiver:
+            for msbuild_files in self.release_info["msvc"]["msbuild"]["files"]:
+                if "devel" in msbuild_files:
+                    for meta_glob_path in msbuild_files["paths"]:
+                        if "@" in meta_glob_path or "@" in msbuild_files["devel"]:
+                            for arch_platform in arch_platforms:
+                                glob_path = arch_platform.configure(meta_glob_path)
+                                paths = glob.glob(glob_path, root_dir=self.root)
+                                dst_subdirpath = arch_platform.configure(msbuild_files['devel'])
+                                for path in paths:
+                                    path = self.root / path
+                                    arcpath = f"{archive_prefix}/{dst_subdirpath}/{Path(path).name}"
+                                    archiver.add_file_path(path=path, arcpath=arcpath)
+                        else:
+                            paths = glob.glob(meta_glob_path, root_dir=self.root)
+                            for path in paths:
+                                path = self.root / path
+                                arcpath = f"{archive_prefix}/{msbuild_files['devel']}/{Path(path).name}"
+                                archiver.add_file_path(path=path, arcpath=arcpath)
+            for extra_files in self.release_info["msvc"]["files"]:
+                if "devel" in extra_files:
+                    for meta_glob_path in extra_files["paths"]:
+                        if "@" in meta_glob_path or "@" in extra_files["devel"]:
+                            for arch_platform in arch_platforms:
+                                glob_path = arch_platform.configure(meta_glob_path)
+                                paths = glob.glob(glob_path, root_dir=self.root)
+                                dst_subdirpath = arch_platform.configure(extra_files['devel'])
+                                for path in paths:
+                                    path = self.root / path
+                                    arcpath = f"{archive_prefix}/{dst_subdirpath}/{Path(path).name}"
+                                    archiver.add_file_path(path=path, arcpath=arcpath)
+                        else:
+                            paths = glob.glob(meta_glob_path, root_dir=self.root)
+                            for path in paths:
+                                path = self.root / path
+                                arcpath = f"{archive_prefix}/{extra_files['devel']}/{Path(path).name}"
+                                archiver.add_file_path(path=path, arcpath=arcpath)
+
+            archiver.add_git_hash(commit=self.commit, arcdir=archive_prefix)
         self.artifacts["VC-devel"] = zip_path

     @classmethod
-    def extract_sdl_version(cls, root: Path, project: str) -> str:
-        with open(root / f"include/SDL_version.h", "r") as f:
+    def extract_sdl_version(cls, root: Path, release_info: dict) -> str:
+        with open(root / release_info["version"]["file"], "r") as f:
             text = f.read()
-        major = next(re.finditer(r"^#define SDL_MAJOR_VERSION\s+([0-9]+)$", text, flags=re.M)).group(1)
-        minor = next(re.finditer(r"^#define SDL_MINOR_VERSION\s+([0-9]+)$", text, flags=re.M)).group(1)
-        micro = next(re.finditer(r"^#define SDL_PATCHLEVEL\s+([0-9]+)$", text, flags=re.M)).group(1)
+        major = next(re.finditer(release_info["version"]["re_major"], text, flags=re.M)).group(1)
+        minor = next(re.finditer(release_info["version"]["re_minor"], text, flags=re.M)).group(1)
+        micro = next(re.finditer(release_info["version"]["re_micro"], text, flags=re.M)).group(1)
         return f"{major}.{minor}.{micro}"
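With the regexes moved into release-info.json, version extraction becomes data-driven. A standalone sketch against an SDL2-style header; the JSON fragment below is an assumption reconstructed from the removed hard-coded patterns, not quoted from the real file:

```python
import re

release_info = {  # hypothetical release-info.json fragment
    "version": {
        "file": "include/SDL_version.h",
        "re_major": r"^#define SDL_MAJOR_VERSION\s+([0-9]+)$",
        "re_minor": r"^#define SDL_MINOR_VERSION\s+([0-9]+)$",
        "re_micro": r"^#define SDL_PATCHLEVEL\s+([0-9]+)$",
    },
}
text = "#define SDL_MAJOR_VERSION 2\n#define SDL_MINOR_VERSION 30\n#define SDL_PATCHLEVEL 1\n"
parts = [next(re.finditer(release_info["version"][k], text, flags=re.M)).group(1)
         for k in ("re_major", "re_minor", "re_micro")]
print(".".join(parts))  # 2.30.1
```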


 def main(argv=None) -> int:
+    if sys.version_info < (3, 11):
+        logger.error("This script needs at least python 3.11")
+        return 1
+
     parser = argparse.ArgumentParser(allow_abbrev=False, description="Create SDL release artifacts")
-    parser.add_argument("--root", metavar="DIR", type=Path, default=Path(__file__).absolute().parents[1], help="Root of SDL")
+    parser.add_argument("--root", metavar="DIR", type=Path, default=Path(__file__).absolute().parents[1], help="Root of project")
+    parser.add_argument("--release-info", metavar="JSON", dest="path_release_info", type=Path, default=Path(__file__).absolute().parent / "release-info.json", help="Path of release-info.json")
+    parser.add_argument("--dependency-folder", metavar="FOLDER", dest="deps_path", type=Path, default="deps", help="Directory containing pre-built archives of dependencies (will be removed when downloading archives)")
     parser.add_argument("--out", "-o", metavar="DIR", dest="dist_path", type=Path, default="dist", help="Output directory")
     parser.add_argument("--github", action="store_true", help="Script is running on a GitHub runner")
     parser.add_argument("--commit", default="HEAD", help="Git commit/tag of which a release should be created")
-    parser.add_argument("--project", required=True, help="Name of the project (e.g. SDL2")
-    parser.add_argument("--create", choices=["source", "mingw", "win32", "framework", "android"], required=True, action="append", dest="actions", help="What to do")
+    parser.add_argument("--actions", choices=["download", "source", "mingw", "msvc", "dmg"], required=True, nargs="+", dest="actions", help="What to do?")
     parser.set_defaults(loglevel=logging.INFO)
     parser.add_argument('--vs-year', dest="vs_year", help="Visual Studio year")
-    parser.add_argument('--android-api', type=int, dest="android_api", help="Android API version")
-    parser.add_argument('--android-home', dest="android_home", default=os.environ.get("ANDROID_HOME"), help="Android Home folder")
-    parser.add_argument('--android-ndk-home', dest="android_ndk_home", default=os.environ.get("ANDROID_NDK_HOME"), help="Android NDK Home folder")
-    parser.add_argument('--android-abis', dest="android_abis", nargs="*", choices=ANDROID_AVAILABLE_ABIS, default=list(ANDROID_AVAILABLE_ABIS), help="Android NDK Home folder")
     parser.add_argument('--cmake-generator', dest="cmake_generator", default="Ninja", help="CMake Generator")
     parser.add_argument('--debug', action='store_const', const=logging.DEBUG, dest="loglevel", help="Print script debug information")
     parser.add_argument('--dry-run', action='store_true', dest="dry", help="Don't execute anything")
     parser.add_argument('--force', action='store_true', dest="force", help="Ignore a non-clean git tree")
+    parser.add_argument('--overwrite', action='store_true', dest="overwrite", help="Allow potentially overwriting other projects")
+    parser.add_argument('--fast', action='store_true', dest="fast", help="Don't do a rebuild")

     args = parser.parse_args(argv)
     logging.basicConfig(level=args.loglevel, format='[%(levelname)s] %(message)s')
-    args.actions = set(args.actions)
+    args.deps_path = args.deps_path.absolute()
     args.dist_path = args.dist_path.absolute()
     args.root = args.root.absolute()
     args.dist_path = args.dist_path.absolute()
@@ -524,6 +858,9 @@ def main(argv=None) -> int:
     else:
         section_printer = SectionPrinter()

+    if args.github and "GITHUB_OUTPUT" not in os.environ:
+        os.environ["GITHUB_OUTPUT"] = "/tmp/github_output.txt"
+
     executer = Executer(root=args.root, dry=args.dry)

     root_git_hash_path = args.root / GIT_HASH_FILENAME
@@ -535,101 +872,97 @@ def main(argv=None) -> int:
         logger.warning("Commit argument is %s, but archive commit is %s. Using %s.", args.commit, archive_commit, archive_commit)
         args.commit = archive_commit
     else:
-        args.commit = executer.run(["git", "rev-parse", args.commit], stdout=True, dry_out="e5812a9fd2cda317b503325a702ba3c1c37861d9").stdout.strip()
+        args.commit = executer.check_output(["git", "rev-parse", args.commit], dry_out="e5812a9fd2cda317b503325a702ba3c1c37861d9").strip()
     logger.info("Using commit %s", args.commit)

+    try:
+        with args.path_release_info.open() as f:
+            release_info = json.load(f)
+    except FileNotFoundError:
+        logger.error(f"Could not find {args.path_release_info}")
+        return 1
+
     releaser = Releaser(
-        project=args.project,
+        release_info=release_info,
         commit=args.commit,
         root=args.root,
         dist_path=args.dist_path,
         executer=executer,
         section_printer=section_printer,
         cmake_generator=args.cmake_generator,
+        deps_path=args.deps_path,
+        overwrite=args.overwrite,
+        github=args.github,
+        fast=args.fast,
     )

     if root_is_maybe_archive:
         logger.warning("Building from archive. Skipping clean git tree check.")
     else:
-        porcelain_status = executer.run(["git", "status", "--ignored", "--porcelain"], stdout=True, dry_out="\n").stdout.strip()
+        porcelain_status = executer.check_output(["git", "status", "--ignored", "--porcelain"], dry_out="\n").strip()
         if porcelain_status:
             print(porcelain_status)
             logger.warning("The tree is dirty! Do not publish any generated artifacts!")
             if not args.force:
                 raise Exception("The git repo contains modified and/or non-committed files. Run with --force to ignore.")

+    if args.fast:
+        logger.warning("Doing fast build! Do not publish generated artifacts!")
+
     with section_printer.group("Arguments"):
-        print(f"project = {args.project}")
+        print(f"project = {releaser.project}")
         print(f"version = {releaser.version}")
         print(f"commit = {args.commit}")
         print(f"out = {args.dist_path}")
         print(f"actions = {args.actions}")
         print(f"dry = {args.dry}")
         print(f"force = {args.force}")
+        print(f"overwrite = {args.overwrite}")
         print(f"cmake_generator = {args.cmake_generator}")

     releaser.prepare()

+    if "download" in args.actions:
+        releaser.download_dependencies()
+
+    if set(args.actions).intersection({"msvc", "mingw"}):
+        print("Verifying presence of dependencies (run 'download' action to download) ...")
+        releaser.verify_dependencies()
+        print("... done")
+
     if "source" in args.actions:
         if root_is_maybe_archive:
             raise Exception("Cannot build source archive from source archive")
         with section_printer.group("Create source archives"):
             releaser.create_source_archives()

-    if "framework" in args.actions:
+    if "dmg" in args.actions:
         if platform.system() != "Darwin" and not args.dry:
             parser.error("framework artifact(s) can only be built on Darwin")

-        releaser.create_framework()
+        releaser.create_dmg()

-    if "win32" in args.actions:
+    if "msvc" in args.actions:
         if platform.system() != "Windows" and not args.dry:
-            parser.error("win32 artifact(s) can only be built on Windows")
+            parser.error("msvc artifact(s) can only be built on Windows")
         with section_printer.group("Find Visual Studio"):
             vs = VisualStudio(executer=executer)
-        x86 = releaser.build_vs(arch="x86", platform="Win32", vs=vs)
-        x64 = releaser.build_vs(arch="x64", platform="x64", vs=vs)
+
+        arch_platforms = [
+            VsArchPlatformConfig(arch="x86", platform="Win32", configuration="Release"),
+            VsArchPlatformConfig(arch="x64", platform="x64", configuration="Release"),
+        ]
+        for arch_platform in arch_platforms:
+            releaser.build_vs(arch_platform=arch_platform, vs=vs)
         with section_printer.group("Create SDL VC development zip"):
-            arch_vc = {
-                "x86": x86,
-                "x64": x64,
-            }
-            releaser.build_vs_devel(arch_vc)
+            releaser.build_vs_devel(arch_platforms)

     if "mingw" in args.actions:
         releaser.create_mingw_archives()

-    if "android" in args.actions:
-        if args.android_home is None or not Path(args.android_home).is_dir():
-            parser.error("Invalid $ANDROID_HOME or --android-home: must be a directory containing the Android SDK")
-        if args.android_ndk_home is None or not Path(args.android_ndk_home).is_dir():
-            parser.error("Invalid $ANDROID_NDK_HOME or --android_ndk_home: must be a directory containing the Android NDK")
-        if args.android_api is None:
-            with section_printer.group("Detect Android APIS"):
-                args.android_api = releaser.detect_android_api(android_home=args.android_home)
-        if args.android_api is None or not (Path(args.android_home) / f"platforms/android-{args.android_api}").is_dir():
-            parser.error("Invalid --android-api, and/or could not be detected")
-        if not args.android_abis:
-            parser.error("Need at least one Android ABI")
-        with section_printer.group("Android arguments"):
-            print(f"android_home = {args.android_home}")
-            print(f"android_ndk_home = {args.android_ndk_home}")
-            print(f"android_api = {args.android_api}")
-            print(f"android_abis = {args.android_abis}")
-        releaser.create_android_archives(
-            android_api=args.android_api,
-            android_home=args.android_home,
-            android_ndk_home=args.android_ndk_home,
-            android_abis=args.android_abis,
-        )
-
-
     with section_printer.group("Summary"):
         print(f"artifacts = {releaser.artifacts}")

     if args.github:
-        if args.dry:
-            os.environ["GITHUB_OUTPUT"] = "/tmp/github_output.txt"
         with open(os.environ["GITHUB_OUTPUT"], "a") as f:
             f.write(f"project={releaser.project}\n")
             f.write(f"version={releaser.version}\n")