Add parametrized build script

[ci skip]
Anonymous Maarten 6 months ago
parent
commit
45dfdfbb7b
31 changed files with 1822 additions and 929 deletions
  1. .github/workflows/generic.yml (+2 -2)
  2. .github/workflows/release.yml (+50 -59)
  3. .gitignore (+1 -0)
  4. CMakeLists.txt (+19 -16)
  5. Xcode/SDL/SDL.xcodeproj/project.pbxproj (+1 -1)
  6. Xcode/SDL/pkg-support/build.xcconfig (+8 -0)
  7. Xcode/SDL/pkg-support/resources/cmake/SDL3Config.cmake (+0 -0)
  8. Xcode/SDL/pkg-support/resources/cmake/SDL3ConfigVersion.cmake (+0 -0)
  9. Xcode/SDL/pkg-support/share/cmake/SDL3/SDL3Config.cmake (+18 -6)
  10. Xcode/SDL/pkg-support/share/cmake/SDL3/SDL3ConfigVersion.cmake (+0 -0)
  11. build-scripts/build-release.py (+1089 -553)
  12. build-scripts/create-android-project.py (+4 -1)
  13. build-scripts/create-release.py (+43 -0)
  14. build-scripts/create-release.sh (+0 -6)
  15. build-scripts/pkg-support/android/INSTALL.md.in (+61 -58)
  16. build-scripts/pkg-support/android/__main__.py.in (+7 -7)
  17. build-scripts/pkg-support/android/cmake/SDL3ConfigVersion.cmake.in (+3 -22)
  18. build-scripts/pkg-support/android/description.json.in (+5 -0)
  19. build-scripts/pkg-support/mingw/Makefile (+2 -0)
  20. build-scripts/pkg-support/msvc/Directory.Build.props (+8 -0)
  21. build-scripts/pkg-support/msvc/cmake/SDL3Config.cmake.in (+3 -3)
  22. build-scripts/pkg-support/msvc/cmake/SDL3ConfigVersion.cmake.in (+3 -22)
  23. build-scripts/pkg-support/source/SDL_revision.h.cmake.in (+41 -0)
  24. build-scripts/pkg-support/source/SDL_revision.h.in (+56 -0)
  25. build-scripts/release-info.json (+215 -0)
  26. build-scripts/updaterev.sh (+0 -1)
  27. cmake/android/FindSdlAndroidPlatform.cmake (+3 -2)
  28. cmake/sdlcompilers.cmake (+1 -1)
  29. cmake/sdlcpu.cmake (+156 -148)
  30. docs/README-android.md (+15 -13)
  31. include/build_config/SDL_revision.h.cmake (+8 -8)

+ 2 - 2
.github/workflows/generic.yml

@@ -191,9 +191,9 @@ jobs:
        run: |
          echo "This should show us the SDL_REVISION"
          echo "Shared library:"
-          ${{ (matrix.platform.shared-lib && format('strings build/{0} | grep SDL-', matrix.platform.shared-lib)) || 'echo "<Shared library not supported by platform>"' }}
+          ${{ (matrix.platform.shared-lib && format('strings build/{0} | grep "Github Workflow"', matrix.platform.shared-lib)) || 'echo "<Shared library not supported by platform>"' }}
          echo "Static library:"
-          ${{ (matrix.platform.static-lib && format('strings build/{0} | grep SDL-', matrix.platform.static-lib)) || 'echo "<Static library not supported by platform>"' }}
+          ${{ (matrix.platform.static-lib && format('strings build/{0} | grep "Github Workflow"', matrix.platform.static-lib)) || 'echo "<Static library not supported by platform>"' }}
      - name: 'Run build-time tests (CMake)'
        id: tests
        if: ${{ !matrix.platform.no-cmake && matrix.platform.run-tests }}

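The change above swaps the marker the workflow greps for in the built libraries: instead of the old "SDL-" revision prefix it now looks for the vendor string the release pipeline embeds. As a rough illustration of what that `strings ... | grep` check does, here is a small Python sketch; the library path and the "Github Workflow" marker are taken from the workflow text above, everything else is assumed for illustration.

import re
import sys
from pathlib import Path

def embedded_strings(blob: bytes, min_len: int = 4):
    # Mimic strings(1): yield runs of at least min_len printable ASCII characters.
    for match in re.finditer(rb"[\x20-\x7e]{%d,}" % min_len, blob):
        yield match.group().decode("ascii")

def has_marker(library: Path, marker: str) -> bool:
    # True if the marker text is embedded anywhere in the binary.
    return any(marker in s for s in embedded_strings(library.read_bytes()))

if __name__ == "__main__":
    lib = Path(sys.argv[1])  # e.g. build/libSDL3.so (hypothetical path)
    print(has_marker(lib, "Github Workflow"))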
+ 50 - 59
.github/workflows/release.yml

@@ -37,9 +37,8 @@ jobs:
        shell: bash
        run: |
          python build-scripts/build-release.py \
-            --create source \
+            --actions source \
            --commit ${{ inputs.commit }} \
-            --project SDL3 \
            --root "${{ github.workspace }}/SDL" \
            --github \
            --debug
@@ -93,7 +92,7 @@ jobs:
      - name: 'Set up Python'
        uses: actions/setup-python@v5
        with:
-          python-version: '3.10'
+          python-version: '3.11'
      - name: 'Fetch build-release.py'
        uses: actions/checkout@v4
        with:
@@ -114,9 +113,8 @@ jobs:
        shell: bash
        run: |
          python build-scripts/build-release.py \
-            --create xcframework \
+            --actions dmg \
            --commit ${{ inputs.commit }} \
-            --project SDL3 \
            --root "${{ steps.tar.outputs.path }}" \
            --github \
            --debug
@@ -240,12 +238,13 @@ jobs:
    outputs:
      VC-x86: ${{ steps.releaser.outputs.VC-x86 }}
      VC-x64: ${{ steps.releaser.outputs.VC-x64 }}
+      VC-arm64: ${{ steps.releaser.outputs.VC-arm64 }}
      VC-devel: ${{ steps.releaser.outputs.VC-devel }}
    steps:
      - name: 'Set up Python'
        uses: actions/setup-python@v5
        with:
-          python-version: '3.10'
+          python-version: '3.11'
      - name: 'Fetch build-release.py'
        uses: actions/checkout@v4
        with:
@@ -265,12 +264,11 @@ jobs:
      - name: 'Build MSVC binary archives'
        id: releaser
        run: |
-          python build-scripts/build-release.py     `
-            --create win32                          `
-            --commit ${{ inputs.commit }}           `
-            --project SDL3                          `
-            --root "${{ steps.zip.outputs.path }}"  `
-            --github                                `
+          python build-scripts/build-release.py `
+            --actions msvc `
+            --commit ${{ inputs.commit }} `
+            --root "${{ steps.zip.outputs.path }}" `
+            --github `
            --debug
      - name: 'Store MSVC archives'
        uses: actions/upload-artifact@v4
@@ -405,7 +403,7 @@ jobs:
      - name: 'Set up Python'
        uses: actions/setup-python@v5
        with:
-          python-version: '3.10'
+          python-version: '3.11'
      - name: 'Fetch build-release.py'
        uses: actions/checkout@v4
        with:
@@ -428,12 +426,11 @@ jobs:
      - name: 'Build MinGW binary archives'
        id: releaser
        run: |
-          python build-scripts/build-release.py     \
-            --create mingw                          \
-            --commit ${{ inputs.commit }}           \
-            --project SDL3                          \
-            --root "${{ steps.tar.outputs.path }}"  \
-            --github                                \
+          python build-scripts/build-release.py \
+            --actions mingw \
+            --commit ${{ inputs.commit }} \
+            --root "${{ steps.tar.outputs.path }}" \
+            --github \
            --debug
      - name: 'Store MinGW archives'
        uses: actions/upload-artifact@v4
@@ -507,7 +504,7 @@ jobs:
      - name: 'Set up Python'
        uses: actions/setup-python@v5
        with:
-          python-version: '3.10'
+          python-version: '3.11'
      - name: 'Fetch build-release.py'
        uses: actions/checkout@v4
        with:
@@ -540,12 +537,11 @@ jobs:
      - name: 'Build Android prefab binary archive(s)'
        id: releaser
        run: |
-          python build-scripts/build-release.py     \
-            --create android                        \
-            --commit ${{ inputs.commit }}           \
-            --project SDL3                          \
-            --root "${{ steps.tar.outputs.path }}"  \
-            --github                                \
+          python build-scripts/build-release.py \
+            --actions android \
+            --commit ${{ inputs.commit }} \
+            --root "${{ steps.tar.outputs.path }}" \
+            --github \
            --debug
      - name: 'Store Android archive(s)'
        uses: actions/upload-artifact@v4
@@ -560,7 +556,7 @@ jobs:
      - name: 'Set up Python'
        uses: actions/setup-python@v5
        with:
-          python-version: '3.10'
+          python-version: '3.11'
      - uses: actions/setup-java@v4
        with:
          distribution: 'temurin'
@@ -581,6 +577,29 @@ jobs:
          mkdir -p /tmp/tardir
          tar -C /tmp/tardir -v -x -f "${{ github.workspace }}/${{ needs.src.outputs.src-tar-gz }}"
          echo "path=/tmp/tardir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$GITHUB_OUTPUT
+      - name: 'Extract Android SDK from AAR'
+        id: sdk
+        run: |
+          python "${{ github.workspace }}/${{ needs.android.outputs.android-aar }}" -o /tmp/SDL3-android
+          echo "prefix=/tmp/SDL3-android" >>$GITHUB_OUTPUT
+      - name: 'CMake (configure + build) x86, x64, arm32, arm64'
+        run: |
+          android_abis="x86 x86_64 armeabi-v7a arm64-v8a"
+          for android_abi in ${android_abis}; do
+            echo "Configuring ${android_abi}..."
+            cmake -S "${{ steps.src.outputs.path }}/cmake/test" \
+              -DTEST_FULL=TRUE \
+              -DTEST_STATIC=FALSE \
+              -DTEST_TEST=TRUE \
+              -DCMAKE_PREFIX_PATH="${{ steps.sdk.outputs.prefix }}" \
+              -DCMAKE_TOOLCHAIN_FILE=${ANDROID_NDK_HOME}/build/cmake/android.toolchain.cmake \
+              -DANDROID_ABI=${android_abi} \
+              -Werror=dev \
+              -DCMAKE_BUILD_TYPE=Release \
+              -B "${android_abi}"
+            echo "Building ${android_abi}..."
+            cmake --build "${android_abi}" --config Release --verbose
+          done
      - name: 'Create gradle project'
        id: create-gradle-project
        run: |
@@ -591,11 +610,6 @@ jobs:
            --variant aar \
            --output "/tmp/projects"
          echo "path=/tmp/projects/org.libsdl.testspriteminimal" >>$GITHUB_OUTPUT
-
-          echo ""
-          echo "Project contents:"
-          echo ""
-          find "/tmp/projects/org.libsdl.testspriteminimal"
      - name: 'Copy SDL3 aar into Gradle project'
        run: |
          cp "${{ github.workspace }}/${{ needs.android.outputs.android-aar }}" "${{ steps.create-gradle-project.outputs.path }}/app/libs"
@@ -604,34 +618,11 @@ jobs:
          echo "Project contents:"
          echo ""
          find "${{ steps.create-gradle-project.outputs.path }}"
-      - name: 'Build app (Gradle & ndk-build)'
-        run: |
-          cd "${{ steps.create-gradle-project.outputs.path }}"
-          ./gradlew -i assembleRelease -PBUILD_WITH_CMAKE=1
      - name: 'Build app (Gradle & CMake)'
        run: |
          cd "${{ steps.create-gradle-project.outputs.path }}"
-          ./gradlew -i assembleRelease
-      - name: 'Extract Android SDK from AAR'
-        id: sdk
-        run: |
-          python "${{ github.workspace }}/${{ needs.android.outputs.android-aar }}" -o /tmp/SDL3-android
-          echo "prefix=/tmp/SDL3-android" >>$GITHUB_OUTPUT
-      - name: 'CMake (configure + build) x86, x64, arm32, arm64'
+          ./gradlew -i assembleRelease -Pandroid.native.buildOutput=verbose -PBUILD_WITH_CMAKE=1
+      - name: 'Build app (Gradle & ndk-build)'
        run: |
-          android_abis="x86 x86_64 armeabi-v7a arm64-v8a"
-          for android_abi in ${android_abis}; do
-            echo "Configuring ${android_abi}..."
-            cmake -S "${{ steps.src.outputs.path }}/cmake/test"                               \
-              -DTEST_FULL=TRUE                                                                \
-              -DTEST_STATIC=FALSE                                                             \
-              -DTEST_TEST=TRUE                                                                \
-              -DCMAKE_PREFIX_PATH="${{ steps.sdk.outputs.prefix }}"                           \
-              -DCMAKE_TOOLCHAIN_FILE=${ANDROID_NDK_HOME}/build/cmake/android.toolchain.cmake  \
-              -DANDROID_ABI=${android_abi}                                                    \
-              -Werror=dev                                                                     \
-              -DCMAKE_BUILD_TYPE=Release                                                      \
-              -B "${android_abi}"
-            echo "Building ${android_abi}..."
-            cmake --build "${android_abi}" --config Release --verbose
-          done
+          cd "${{ steps.create-gradle-project.outputs.path }}"
+          ./gradlew -i assembleRelease -Pandroid.native.buildOutput=verbose

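A notable step in the reworked workflow above is 'Extract Android SDK from AAR', which simply runs `python <SDL3 .aar> -o /tmp/SDL3-android`: the archive doubles as a Python zip application because it bundles a __main__.py (templated from build-scripts/pkg-support/android/__main__.py.in, also touched by this commit). A minimal sketch of that idea follows; the -o option mirrors the workflow invocation, but the extraction logic is an assumption, not the actual contents of __main__.py.in.

import argparse
import os
import zipfile

def main() -> None:
    parser = argparse.ArgumentParser(description="Extract the files bundled in this archive")
    parser.add_argument("-o", "--output", required=True, help="destination directory")
    args = parser.parse_args()

    # When invoked as `python something.aar`, Python runs __main__.py from inside the
    # archive and sets __file__ to "<archive path>/__main__.py", so its parent is the archive.
    archive_path = os.path.dirname(os.path.abspath(__file__))
    with zipfile.ZipFile(archive_path) as archive:
        archive.extractall(args.output)
    print(f"Extracted {archive_path} to {args.output}")

if __name__ == "__main__":
    main()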
+ 1 - 0
.gitignore

@@ -51,6 +51,7 @@ cmake-build-*
 .DS_Store
 xcuserdata
 *.xcworkspace
+Xcode/build.xcconfig
 
 # for Visual Studio Code
 .vscode/

+ 19 - 16
CMakeLists.txt

@@ -1415,13 +1415,20 @@ if(ANDROID)
        set(javadoc_index_html "${javadocdir}/index.html")
        add_custom_command(
          OUTPUT "${javadoc_index_html}"
-          COMMAND ${CMAKE_COMMAND} -E rm -rf "${javadocdir}"
+          COMMAND ${CMAKE_COMMAND} -E rm -rf "${javadocdir}" "${javadocjar}"
          COMMAND ${Java_JAVADOC_EXECUTABLE} -encoding utf8 -d "${javadocdir}"
            -classpath "${path_android_jar}"
            -author -use -version ${SDL_JAVA_SOURCES}
          DEPENDS ${SDL_JAVA_SOURCES} "${path_android_jar}"
        )
-        add_custom_target(SDL3-javadoc ALL DEPENDS "${javadoc_index_html}")
+        add_custom_command(
+          OUTPUT "${javadocjar}"
+          COMMAND ${Java_JAR_EXECUTABLE} -c -f "${javadocjar}"
+            -C "${javadocdir}" *
+          WORKING_DIRECTORY "${javadocdir}"
+          DEPENDS ${javadoc_index_html}
+        )
+        add_custom_target(SDL3-javadoc ALL DEPENDS "${javadoc_index_html}" "${javadocjar}")
        set_property(TARGET SDL3-javadoc PROPERTY OUTPUT_DIR "${javadocdir}")
      endif()
    endif()
@@ -3044,21 +3051,17 @@ foreach(_hdr IN LISTS SDL3_INCLUDE_FILES)
   endif()
 endforeach()
 
-set(SDL_REVISION "" CACHE STRING "Custom SDL revision (overrides SDL_REVISION_SUFFIX)")
-if(NOT SDL_REVISION)
-  set(SDL_REVISION_SUFFIX "" CACHE STRING "Suffix for the SDL revision")
-  if(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/VERSION.txt")
-    # If VERSION.txt exists, it contains the SDL version
-    file(READ "${CMAKE_CURRENT_SOURCE_DIR}/VERSION.txt" SDL_REVISION_CENTER)
-    string(STRIP "${SDL_REVISION_CENTER}" SDL_REVISION_CENTER)
+# If REVISION.txt exists, then we are building from a SDL release.
+# SDL_revision.h(.cmake) in source releases have SDL_REVISION baked into them.
+if(NOT EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/REVISION.txt")
+  set(SDL_REVISION "" CACHE STRING "Custom SDL revision")
+  if(SDL_REVISION)
+    set(SDL_REVISION_CENTER "${SDL_VERSION_MAJOR}.${SDL_VERSION_MINOR}.${SDL_VERSION_MICRO}-${SDL_REVISION}")
   else()
-    # If VERSION does not exist, use git to calculate a version
+    # If SDL_REVISION is not overrided, use git to describe
     git_describe(SDL_REVISION_CENTER)
-    if(NOT SDL_REVISION_CENTER)
-      set(SDL_REVISION_CENTER "${SDL3_VERSION}-no-vcs")
-    endif()
   endif()
-  set(SDL_REVISION "SDL-${SDL_REVISION_CENTER}${SDL_REVISION_SUFFIX}")
+  set(SDL_REVISION "SDL3-${SDL_REVISION_CENTER}")
 endif()
 
 execute_process(COMMAND "${CMAKE_COMMAND}" -E make_directory "${SDL3_BINARY_DIR}/include/SDL3")
@@ -3645,7 +3648,7 @@ if(NOT SDL_DISABLE_INSTALL)
       )
     endif()
     if(TARGET SDL3-javasources)
-      install(FILES  "${SDL3_BINARY_DIR}/SDL3-${SDL3_VERSION}-sources.jar"
+      install(FILES "${SDL3_BINARY_DIR}/SDL3-${SDL3_VERSION}-sources.jar"
         DESTINATION "${SDL_INSTALL_JAVADIR}/SDL3")
     endif()
   endif()
@@ -3659,7 +3662,7 @@ if(NOT SDL_DISABLE_INSTALL)
     )
     if(TARGET SDL3-javadoc)
       set(SDL_INSTALL_JAVADOCDIR "${CMAKE_INSTALL_DATAROOTDIR}/javadoc" CACHE PATH "Path where to install SDL3 javadoc")
-      install(DIRECTORY "${SDL3_BINARY_DIR}/docs/javadoc/"
+      install(FILES "${SDL3_BINARY_DIR}/SDL3-${SDL3_VERSION}-javadoc.jar"
         DESTINATION "${SDL_INSTALL_JAVADOCDIR}/SDL3")
     endif()
   endif()

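The CMakeLists.txt change above reworks how SDL_REVISION is derived: a REVISION.txt shipped in source releases wins, an explicit -DSDL_REVISION=... override comes next, and a git describe of the work tree is the fallback. The Python sketch below mirrors that decision order for illustration only; the file name and the "SDL3-" prefix come from the diff, while the exact git describe flags and return format are assumptions.

import subprocess
from pathlib import Path

def compute_revision(source_dir: Path, version: str, override: str = "") -> str:
    revision_txt = source_dir / "REVISION.txt"
    if revision_txt.exists():
        # Source release: build-release.py baked the revision into the tarball.
        return revision_txt.read_text().strip()
    if override:
        # Equivalent of passing -DSDL_REVISION=<value> to CMake.
        center = f"{version}-{override}"
    else:
        # Development build: fall back to git, like git_describe() does in CMake.
        center = subprocess.check_output(
            ["git", "describe", "--tags", "--long"], cwd=source_dir, text=True
        ).strip()
    return f"SDL3-{center}"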
+ 1 - 1
Xcode/SDL/SDL.xcodeproj/project.pbxproj

@@ -2795,7 +2795,7 @@
 			);
 			runOnlyForDeploymentPostprocessing = 0;
 			shellPath = /bin/sh;
-			shellScript = "set -ex\n\nmkdir -p build/dmg-tmp/share/cmake/SDL3\ncp -a build/SDL3.xcframework build/dmg-tmp/\n\ncp pkg-support/resources/License.txt build/dmg-tmp\ncp pkg-support/resources/ReadMe.txt build/dmg-tmp\ncp pkg-support/resources/share/cmake/SDL3/sdl3-config.cmake build/dmg-tmp/share/cmake/SDL3\ncp pkg-support/resources/share/cmake/SDL3/sdl3-config-version.cmake build/dmg-tmp/share/cmake/SDL3\n\n# remove the .DS_Store files if any (we may want to provide one in the future for fancy .dmgs)\nfind build/dmg-tmp -name .DS_Store -exec rm -f \"{}\" \\;\n\n# for fancy .dmg\nmkdir -p build/dmg-tmp/.logo\ncp pkg-support/resources/SDL_DS_Store build/dmg-tmp/.DS_Store\ncp pkg-support/sdl_logo.pdf build/dmg-tmp/.logo\n\n# create the dmg\nhdiutil create -ov -fs HFS+ -volname SDL3 -srcfolder build/dmg-tmp build/SDL3.dmg\n\n# clean up\nrm -rf build/dmg-tmp\n";
+			shellScript = "set -ex\n\nmkdir -p build/dmg-tmp/share/cmake/SDL3\ncp -a build/SDL3.xcframework build/dmg-tmp/\n\ncp pkg-support/resources/License.txt build/dmg-tmp\ncp pkg-support/resources/ReadMe.txt build/dmg-tmp\ncp pkg-support/share/cmake/SDL3/SDL3Config.cmake build/dmg-tmp/share/cmake/SDL3\ncp pkg-support/share/cmake/SDL3/SDL3ConfigVersion.cmake build/dmg-tmp/share/cmake/SDL3\n\n# remove the .DS_Store files if any (we may want to provide one in the future for fancy .dmgs)\nfind build/dmg-tmp -name .DS_Store -exec rm -f \"{}\" \\;\n\n# for fancy .dmg\nmkdir -p build/dmg-tmp/.logo\ncp pkg-support/resources/SDL_DS_Store build/dmg-tmp/.DS_Store\ncp pkg-support/sdl_logo.pdf build/dmg-tmp/.logo\n\n# create the dmg\nhdiutil create -ov -fs HFS+ -volname SDL3 -srcfolder build/dmg-tmp build/SDL3.dmg\n\n# clean up\nrm -rf build/dmg-tmp\n";
 		};
 		F3B38CF0296F63D1005DA6D3 /* ShellScript */ = {
 			isa = PBXShellScriptBuildPhase;

+ 8 - 0
Xcode/SDL/pkg-support/build.xcconfig

@@ -0,0 +1,8 @@
+//
+//  build.xcconfig
+//
+
+// Configuration settings file format documentation can be found at:
+// https://help.apple.com/xcode/#/dev745c5c974
+
+SDL_PREPROCESSOR_DEFINITIONS = SDL_VENDOR_INFO=\"libsdl.org\"

+ 0 - 0
Xcode/SDL/pkg-support/resources/CMake/sdl3-config.cmake → Xcode/SDL/pkg-support/resources/cmake/SDL3Config.cmake


+ 0 - 0
Xcode/SDL/pkg-support/resources/CMake/sdl3-config-version.cmake → Xcode/SDL/pkg-support/resources/cmake/SDL3ConfigVersion.cmake


+ 18 - 6
Xcode/SDL/pkg-support/resources/share/cmake/SDL3/sdl3-config.cmake → Xcode/SDL/pkg-support/share/cmake/SDL3/SDL3Config.cmake

@@ -32,14 +32,24 @@ endmacro()
 set(SDL3_FOUND TRUE)
 
 macro(_check_target_is_simulator)
-    include(CheckCSourceCompiles)
-    check_c_source_compiles([===[
+    set(src [===[
     #include <TargetConditionals.h>
-    #if defined(TARGET_OS_SIMULATOR)
+    #if defined(TARGET_OS_SIMULATOR) && TARGET_OS_SIMULATOR
     int target_is_simulator;
     #endif
     int main(int argc, char *argv[]) { return target_is_simulator; }
-    ]===] SDL_TARGET_IS_SIMULATOR)
+    ]===])
+    if(CMAKE_C_COMPILER)
+        include(CheckCSourceCompiles)
+        check_c_source_compiles("${src}" SDL_TARGET_IS_SIMULATOR)
+    elseif(CMAKE_CXX_COMPILER)
+        include(CheckCXXSourceCompiles)
+        check_cxx_source_compiles("${src}" SDL_TARGET_IS_SIMULATOR)
+    else()
+        enable_language(C)
+        include(CheckCSourceCompiles)
+        check_c_source_compiles("${src}" SDL_TARGET_IS_SIMULATOR)
+    endif()
 endmacro()
 
 if(CMAKE_SYSTEM_NAME STREQUAL "iOS")
@@ -59,7 +69,7 @@ elseif(CMAKE_SYSTEM_NAME STREQUAL "tvOS")
 elseif(CMAKE_SYSTEM_NAME STREQUAL "Darwin")
     set(_xcfw_target_subdir "macos-arm64_x86_64")
 else()
-    message(WARNING "Unsupported Apple platform (${CMAKE_SYSTEM_NAME}) and broken sdl3-config-version.cmake")
+    message(WARNING "Unsupported Apple platform (${CMAKE_SYSTEM_NAME}) and broken SDL3ConfigVersion.cmake")
     set(SDL3_FOUND FALSE)
     return()
 endif()
@@ -89,7 +99,9 @@ set(SDL3_Headers_FOUND TRUE)
 
 if(NOT TARGET SDL3::SDL3-shared)
     add_library(SDL3::SDL3-shared SHARED IMPORTED)
-    if(CMAKE_VERSION GREATER_EQUAL "3.28")
+    # CMake does not automatically add RPATHS when using xcframeworks
+    # https://gitlab.kitware.com/cmake/cmake/-/issues/25998
+    if(0)  # if(CMAKE_VERSION GREATER_EQUAL "3.28")
         set_target_properties(SDL3::SDL3-shared
             PROPERTIES
                 FRAMEWORK "TRUE"

+ 0 - 0
Xcode/SDL/pkg-support/resources/share/cmake/SDL3/sdl3-config-version.cmake → Xcode/SDL/pkg-support/share/cmake/SDL3/SDL3ConfigVersion.cmake


+ 1089 - 553
build-scripts/build-release.py

@@ -1,17 +1,28 @@
 #!/usr/bin/env python
 
+"""
+This script is shared between SDL2, SDL3, and all satellite libraries.
+Don't specialize this script for doing project-specific modifications.
+Rather, modify release-info.json.
+"""
+
 import argparse
 import collections
+import dataclasses
+from collections.abc import Callable
 import contextlib
 import datetime
+import fnmatch
 import glob
 import io
 import json
 import logging
+import multiprocessing
 import os
 from pathlib import Path
 import platform
 import re
+import shlex
 import shutil
 import subprocess
 import sys
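The new module docstring points all project-specific configuration at build-scripts/release-info.json, which this commit adds (215 lines, not shown here). Judging only from the keys the script reads later in this diff — release_info["name"], a "source" section with "files", "checks" and "extra-repos", and a "dmg" section with "path" and "project" — a minimal entry could look roughly like the sketch below; the actual schema and values may differ.

# Hypothetical shape of release-info.json, reconstructed from the accesses in this script.
RELEASE_INFO_SKETCH = {
    "name": "SDL3",
    "source": {
        # Paths asserted to exist in the generated source archive.
        "checks": ["src/SDL.c", "include/SDL3/SDL.h"],
        # Extra files to copy in: destination dir -> list of globs, optionally "path:new-name";
        # *.in files get their @<@KEY@>@ placeholders expanded.
        "files": {
            "": ["build-scripts/pkg-support/source/SDL_revision.h.cmake.in:SDL_revision.h.cmake"],
        },
        "extra-repos": [],
    },
    "dmg": {
        "path": "Xcode/SDL/build/SDL3.dmg",
        "project": "Xcode/SDL/SDL.xcodeproj",
    },
}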
@@ -21,21 +32,55 @@ import textwrap
 import typing
 import zipfile
 
-logger = logging.getLogger(__name__)
-
 
-VcArchDevel = collections.namedtuple("VcArchDevel", ("dll", "pdb", "imp", "test"))
+logger = logging.getLogger(__name__)
 GIT_HASH_FILENAME = ".git-hash"
-
-ANDROID_AVAILABLE_ABIS = [
-    "armeabi-v7a",
-    "arm64-v8a",
-    "x86",
-    "x86_64",
-]
-ANDROID_MINIMUM_API = 19
-ANDROID_TARGET_API = 29
-ANDROID_MINIMUM_NDK = 21
+REVISION_TXT = "REVISION.txt"
+
+
+def safe_isotime_to_datetime(str_isotime: str) -> datetime.datetime:
+    try:
+        return datetime.datetime.fromisoformat(str_isotime)
+    except ValueError:
+        pass
+    logger.warning("Invalid iso time: %s", str_isotime)
+    if str_isotime[-6:-5] in ("+", "-"):
+        # Commits can have isotime with invalid timezone offset (e.g. "2021-07-04T20:01:40+32:00")
+        modified_str_isotime = str_isotime[:-6] + "+00:00"
+        try:
+            return datetime.datetime.fromisoformat(modified_str_isotime)
+        except ValueError:
+            pass
+    raise ValueError(f"Invalid isotime: {str_isotime}")
+
+
+def arc_join(*parts: list[str]) -> str:
+    assert all(p[:1] != "/" and p[-1:] != "/" for p in parts), f"None of {parts} may start or end with '/'"
+    return "/".join(p for p in parts if p)
+
+
+@dataclasses.dataclass(frozen=True)
+class VsArchPlatformConfig:
+    arch: str
+    configuration: str
+    platform: str
+
+    def extra_context(self):
+        return {
+            "ARCH": self.arch,
+            "CONFIGURATION": self.configuration,
+            "PLATFORM": self.platform,
+        }
+
+
+@contextlib.contextmanager
+def chdir(path):
+    original_cwd = os.getcwd()
+    try:
+        os.chdir(path)
+        yield
+    finally:
+        os.chdir(original_cwd)
 
 
 
 
 class Executer:
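Among the helpers added above, safe_isotime_to_datetime exists because git can report commit times with an out-of-range UTC offset, which datetime.fromisoformat() refuses to parse. A worked example of the fallback it performs, using the timestamp quoted in the script's own comment:

import datetime

bad = "2021-07-04T20:01:40+32:00"  # a 32-hour offset is rejected by fromisoformat()
try:
    datetime.datetime.fromisoformat(bad)
except ValueError:
    # Rewrite the bogus offset as UTC, which is what safe_isotime_to_datetime falls back to.
    fixed = bad[:-6] + "+00:00"
    print(datetime.datetime.fromisoformat(fixed))  # 2021-07-04 20:01:40+00:00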
@@ -43,14 +88,18 @@ class Executer:
         self.root = root
         self.dry = dry
 
 
-    def run(self, cmd, stdout=False, dry_out=None, force=False):
+    def run(self, cmd, cwd=None, env=None):
+        logger.info("Executing args=%r", cmd)
         sys.stdout.flush()
+        if not self.dry:
+            subprocess.check_call(cmd, cwd=cwd or self.root, env=env, text=True)
+
+    def check_output(self, cmd, cwd=None, dry_out=None, env=None, text=True):
         logger.info("Executing args=%r", cmd)
-        if self.dry and not force:
-            if stdout:
-                return subprocess.run(["echo", "-E", dry_out or ""], stdout=subprocess.PIPE if stdout else None, text=True, check=True, cwd=self.root)
-        else:
-            return subprocess.run(cmd, stdout=subprocess.PIPE if stdout else None, text=True, check=True, cwd=self.root)
+        sys.stdout.flush()
+        if self.dry:
+            return dry_out
+        return subprocess.check_output(cmd, cwd=cwd or self.root, env=env, text=text)
 
 
 
 
 class SectionPrinter:
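The reworked Executer above splits command execution into run() (no capture) and check_output() (captured text), and in dry-run mode check_output() simply returns the caller-supplied dry_out placeholder instead of running anything. A small usage sketch, assuming a constructor that takes the root and dry values the __init__ shown above stores:

from pathlib import Path

executer = Executer(root=Path("."), dry=True)  # hypothetical construction
executer.run(["git", "status"])                # only logged, not executed, because dry=True
sha = executer.check_output(["git", "rev-parse", "HEAD"], dry_out="0" * 40)
print(sha)  # the dry_out placeholder in dry mode; real command output otherwise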
@@ -103,7 +152,7 @@ class VisualStudio:
                 return None
             vswhere_spec.extend(["-version", f"[{version},{version+1})"])
         vswhere_cmd = ["vswhere"] + vswhere_spec + ["-property", "installationPath"]
-        vs_install_path = Path(self.executer.run(vswhere_cmd, stdout=True, dry_out="/tmp").stdout.strip())
+        vs_install_path = Path(self.executer.check_output(vswhere_cmd, dry_out="/tmp").strip())
         logger.info("VS install_path = %s", vs_install_path)
         assert vs_install_path.is_dir(), "VS installation path does not exist"
         vsdevcmd_path = vs_install_path / "Common7/Tools/vsdevcmd.bat"
@@ -116,7 +165,7 @@ class VisualStudio:
 
 
     def find_msbuild(self) -> typing.Optional[Path]:
         vswhere_cmd = ["vswhere", "-latest", "-requires", "Microsoft.Component.MSBuild", "-find", r"MSBuild\**\Bin\MSBuild.exe"]
-        msbuild_path = Path(self.executer.run(vswhere_cmd, stdout=True, dry_out="/tmp/MSBuild.exe").stdout.strip())
+        msbuild_path = Path(self.executer.check_output(vswhere_cmd, dry_out="/tmp/MSBuild.exe").strip())
         logger.info("MSBuild path = %s", msbuild_path)
         if self.dry:
             msbuild_path.parent.mkdir(parents=True, exist_ok=True)
@@ -124,11 +173,11 @@ class VisualStudio:
         assert msbuild_path.is_file(), "MSBuild.exe does not exist"
         return msbuild_path
 
 
-    def build(self, arch: str, platform: str, configuration: str, projects: list[Path]):
+    def build(self, arch_platform: VsArchPlatformConfig, projects: list[Path]):
         assert projects, "Need at least one project to build"
 
 
-        vsdev_cmd_str = f"\"{self.vsdevcmd}\" -arch={arch}"
-        msbuild_cmd_str = " && ".join([f"\"{self.msbuild}\" \"{project}\" /m /p:BuildInParallel=true /p:Platform={platform} /p:Configuration={configuration}" for project in projects])
+        vsdev_cmd_str = f"\"{self.vsdevcmd}\" -arch={arch_platform.arch}"
+        msbuild_cmd_str = " && ".join([f"\"{self.msbuild}\" \"{project}\" /m /p:BuildInParallel=true /p:Platform={arch_platform.platform} /p:Configuration={arch_platform.configuration}" for project in projects])
         bat_contents = f"{vsdev_cmd_str} && {msbuild_cmd_str}\n"
         bat_path = Path(tempfile.gettempdir()) / "cmd.bat"
         with bat_path.open("w") as f:
@@ -139,35 +188,308 @@ class VisualStudio:
         self.executer.run(cmd)
 
 
 
 
-class Releaser:
-    def __init__(self, project: str, commit: str, root: Path, dist_path: Path, section_printer: SectionPrinter, executer: Executer, cmake_generator: str):
-        self.project = project
-        self.version = self.extract_sdl_version(root=root, project=project)
+class Archiver:
+    def __init__(self, zip_path: typing.Optional[Path]=None, tgz_path: typing.Optional[Path]=None, txz_path: typing.Optional[Path]=None):
+        self._zip_files = []
+        self._tar_files = []
+        self._added_files = set()
+        if zip_path:
+            self._zip_files.append(zipfile.ZipFile(zip_path, "w", compression=zipfile.ZIP_DEFLATED))
+        if tgz_path:
+            self._tar_files.append(tarfile.open(tgz_path, "w:gz"))
+        if txz_path:
+            self._tar_files.append(tarfile.open(txz_path, "w:xz"))
+
+    @property
+    def added_files(self) -> set[str]:
+        return self._added_files
+
+    def add_file_data(self, arcpath: str, data: bytes, mode: int, time: datetime.datetime):
+        for zf in self._zip_files:
+            file_data_time = (time.year, time.month, time.day, time.hour, time.minute, time.second)
+            zip_info = zipfile.ZipInfo(filename=arcpath, date_time=file_data_time)
+            zip_info.external_attr = mode << 16
+            zip_info.compress_type = zipfile.ZIP_DEFLATED
+            zf.writestr(zip_info, data=data)
+        for tf in self._tar_files:
+            tar_info = tarfile.TarInfo(arcpath)
+            tar_info.type = tarfile.REGTYPE
+            tar_info.mode = mode
+            tar_info.size = len(data)
+            tar_info.mtime = int(time.timestamp())
+            tf.addfile(tar_info, fileobj=io.BytesIO(data))
+
+        self._added_files.add(arcpath)
+
+    def add_symlink(self, arcpath: str, target: str, time: datetime.datetime, files_for_zip):
+        logger.debug("Adding symlink (target=%r) -> %s", target, arcpath)
+        for zf in self._zip_files:
+            file_data_time = (time.year, time.month, time.day, time.hour, time.minute, time.second)
+            for f in files_for_zip:
+                zip_info = zipfile.ZipInfo(filename=f["arcpath"], date_time=file_data_time)
+                zip_info.external_attr = f["mode"] << 16
+                zip_info.compress_type = zipfile.ZIP_DEFLATED
+                zf.writestr(zip_info, data=f["data"])
+        for tf in self._tar_files:
+            tar_info = tarfile.TarInfo(arcpath)
+            tar_info.type = tarfile.SYMTYPE
+            tar_info.mode = 0o777
+            tar_info.mtime = int(time.timestamp())
+            tar_info.linkname = target
+            tf.addfile(tar_info)
+
+        self._added_files.update(f["arcpath"] for f in files_for_zip)
+
+    def add_git_hash(self, arcdir: str, commit: str, time: datetime.datetime):
+        arcpath = arc_join(arcdir, GIT_HASH_FILENAME)
+        data = f"{commit}\n".encode()
+        self.add_file_data(arcpath=arcpath, data=data, mode=0o100644, time=time)
+
+    def add_file_path(self, arcpath: str, path: Path):
+        assert path.is_file(), f"{path} should be a file"
+        logger.debug("Adding %s -> %s", path, arcpath)
+        for zf in self._zip_files:
+            zf.write(path, arcname=arcpath)
+        for tf in self._tar_files:
+            tf.add(path, arcname=arcpath)
+
+    def add_file_directory(self, arcdirpath: str, dirpath: Path):
+        assert dirpath.is_dir()
+        if arcdirpath and arcdirpath[-1:] != "/":
+            arcdirpath += "/"
+        for f in dirpath.iterdir():
+            if f.is_file():
+                arcpath = f"{arcdirpath}{f.name}"
+                logger.debug("Adding %s to %s", f, arcpath)
+                self.add_file_path(arcpath=arcpath, path=f)
+
+    def close(self):
+        # Archiver is intentionally made invalid after this function
+        del self._zip_files
+        self._zip_files = None
+        del self._tar_files
+        self._tar_files = None
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, type, value, traceback):
+        self.close()
+
+
+class NodeInArchive:
+    def __init__(self, arcpath: str, path: typing.Optional[Path]=None, data: typing.Optional[bytes]=None, mode: typing.Optional[int]=None, symtarget: typing.Optional[str]=None, time: typing.Optional[datetime.datetime]=None, directory: bool=False):
+        self.arcpath = arcpath
+        self.path = path
+        self.data = data
+        self.mode = mode
+        self.symtarget = symtarget
+        self.time = time
+        self.directory = directory
+
+    @classmethod
+    def from_fs(cls, arcpath: str, path: Path, mode: int=0o100644, time: typing.Optional[datetime.datetime]=None) -> "NodeInArchive":
+        if time is None:
+            time = datetime.datetime.fromtimestamp(os.stat(path).st_mtime)
+        return cls(arcpath=arcpath, path=path, mode=mode)
+
+    @classmethod
+    def from_data(cls, arcpath: str, data: bytes, time: datetime.datetime) -> "NodeInArchive":
+        return cls(arcpath=arcpath, data=data, time=time, mode=0o100644)
+
+    @classmethod
+    def from_text(cls, arcpath: str, text: str, time: datetime.datetime) -> "NodeInArchive":
+        return cls.from_data(arcpath=arcpath, data=text.encode(), time=time)
+
+    @classmethod
+    def from_symlink(cls, arcpath: str, symtarget: str) -> "NodeInArchive":
+        return cls(arcpath=arcpath, symtarget=symtarget)
+
+    @classmethod
+    def from_directory(cls, arcpath: str) -> "NodeInArchive":
+        return cls(arcpath=arcpath, directory=True)
+
+    def __repr__(self) -> str:
+        return f"<{type(self).__name__}:arcpath={self.arcpath},path='{str(self.path)}',len(data)={len(self.data) if self.data else 'n/a'},directory={self.directory},symtarget={self.symtarget}>"
+
+
+def configure_file(path: Path, context: dict[str, str]) -> bytes:
+    text = path.read_text()
+    return configure_text(text, context=context).encode()
+
+
+def configure_text(text: str, context: dict[str, str]) -> str:
+    original_text = text
+    for txt, repl in context.items():
+        text = text.replace(f"@<@{txt}@>@", repl)
+    success = all(thing not in text for thing in ("@<@", "@>@"))
+    if not success:
+        raise ValueError(f"Failed to configure {repr(original_text)}")
+    return text
+
+
+class ArchiveFileTree:
+    def __init__(self):
+        self._tree: dict[str, NodeInArchive] = {}
+
+    def add_file(self, file: NodeInArchive):
+        self._tree[file.arcpath] = file
+
+    def get_latest_mod_time(self) -> datetime.datetime:
+        return max(item.time for item in self._tree.values() if item.time)
+
+    def add_to_archiver(self, archive_base: str, archiver: Archiver):
+        remaining_symlinks = set()
+        added_files = dict()
+
+        def calculate_symlink_target(s: NodeInArchive) -> str:
+            dest_dir = os.path.dirname(s.path)
+            if dest_dir:
+                dest_dir += "/"
+            target = dest_dir + s.symtarget
+            while True:
+                new_target, n = re.subn(r"([^/]+/+[.]{2}/)", "", target)
+                print(f"{target=} {new_target=}")
+                target = new_target
+                if not n:
+                    break
+            return target
+
+        # Add files in first pass
+        for arcpath, node in self._tree.items():
+            if node.data is not None:
+                archiver.add_file_data(arcpath=arc_join(archive_base, arcpath), data=node.data, time=node.time, mode=node.mode)
+                added_files[node.path] = node
+            elif node.path is not None:
+                archiver.add_file_path(arcpath=arc_join(archive_base, arcpath), path=node.path)
+                added_files[node.path] = node
+            elif node.symtarget is not None:
+                remaining_symlinks.add(node)
+            elif node.directory:
+                pass
+            else:
+                raise ValueError(f"Invalid Archive Node: {repr(node)}")
+
+        # Resolve symlinks in second pass: zipfile does not support symlinks, so add files to zip archive
+        while True:
+            if not remaining_symlinks:
+                break
+            symlinks_this_time = set()
+            extra_added_files = {}
+            for symlink in remaining_symlinks:
+                symlink_files_for_zip = {}
+                symlink_target_path = calculate_symlink_target(symlink)
+                if symlink_target_path in added_files:
+                    symlink_files_for_zip[symlink.path] = added_files[symlink_target_path]
+                else:
+                    symlink_target_path_slash = symlink_target_path + "/"
+                    for added_file in added_files:
+                        if added_file.startswith(symlink_target_path_slash):
+                            path_in_symlink = symlink.path + "/" + added_file.removeprefix(symlink_target_path_slash)
+                            symlink_files_for_zip[path_in_symlink] = added_files[added_file]
+                if symlink_files_for_zip:
+                    symlinks_this_time.add(symlink)
+                    extra_added_files.update(symlink_files_for_zip)
+                    files_for_zip = [{"arcpath": f"{archive_base}/{sym_path}", "data": sym_info.data, "mode": sym_info.mode} for sym_path, sym_info in symlink_files_for_zip.items()]
+                    archiver.add_symlink(arcpath=f"{archive_base}/{symlink.path}", target=symlink.symtarget, time=symlink.time, files_for_zip=files_for_zip)
+            # if not symlinks_this_time:
+            #     logger.info("files added: %r", set(path for path in added_files.keys()))
+            assert symlinks_this_time, f"No targets found for symlinks: {remaining_symlinks}"
+            remaining_symlinks.difference_update(symlinks_this_time)
+            added_files.update(extra_added_files)
+
+    def add_directory_tree(self, arc_dir: str, path: Path, time: datetime.datetime):
+        assert path.is_dir()
+        for files_dir, _, filenames in os.walk(path):
+            files_dir_path = Path(files_dir)
+            rel_files_path = files_dir_path.relative_to(path)
+            for filename in filenames:
+                self.add_file(NodeInArchive.from_fs(arcpath=arc_join(arc_dir, str(rel_files_path), filename), path=files_dir_path / filename, time=time))
+
+    def _add_files_recursively(self, arc_dir: str, paths: list[Path], time: datetime.datetime):
+        logger.debug(f"_add_files_recursively({arc_dir=} {paths=})")
+        for path in paths:
+            arcpath = arc_join(arc_dir, path.name)
+            if path.is_file():
+                logger.debug("Adding %s as %s", path, arcpath)
+                self.add_file(NodeInArchive.from_fs(arcpath=arcpath, path=path, time=time))
+            elif path.is_dir():
+                self._add_files_recursively(arc_dir=arc_join(arc_dir, path.name), paths=list(path.iterdir()), time=time)
+            else:
+                raise ValueError(f"Unsupported file type to add recursively: {path}")
+
+    def add_file_mapping(self, arc_dir: str, file_mapping: dict[str, list[str]], file_mapping_root: Path, context: dict[str, str], time: datetime.datetime):
+        for meta_rel_destdir, meta_file_globs in file_mapping.items():
+            rel_destdir = configure_text(meta_rel_destdir, context=context)
+            assert "@" not in rel_destdir, f"archive destination should not contain an @ after configuration ({repr(meta_rel_destdir)}->{repr(rel_destdir)})"
+            for meta_file_glob in meta_file_globs:
+                file_glob = configure_text(meta_file_glob, context=context)
+                assert "@" not in rel_destdir, f"archive glob should not contain an @ after configuration ({repr(meta_file_glob)}->{repr(file_glob)})"
+                if ":" in file_glob:
+                    original_path, new_filename = file_glob.rsplit(":", 1)
+                    assert ":" not in original_path, f"Too many ':' in {repr(file_glob)}"
+                    assert "/" not in new_filename, f"New filename cannot contain a '/' in {repr(file_glob)}"
+                    path = file_mapping_root / original_path
+                    arcpath = arc_join(arc_dir, rel_destdir, new_filename)
+                    if path.suffix == ".in":
+                        data = configure_file(path, context=context)
+                        logger.debug("Adding processed %s -> %s", path, arcpath)
+                        self.add_file(NodeInArchive.from_data(arcpath=arcpath, data=data, time=time))
+                    else:
+                        logger.debug("Adding %s -> %s", path, arcpath)
+                        self.add_file(NodeInArchive.from_fs(arcpath=arcpath, path=path, time=time))
+                else:
+                    relative_file_paths = glob.glob(file_glob, root_dir=file_mapping_root)
+                    assert relative_file_paths, f"Glob '{file_glob}' does not match any file"
+                    self._add_files_recursively(arc_dir=arc_join(arc_dir, rel_destdir), paths=[file_mapping_root / p for p in relative_file_paths], time=time)
+
+
+class SourceCollector:
+    # TreeItem = collections.namedtuple("TreeItem", ("path", "mode", "data", "symtarget", "directory", "time"))
+    def __init__(self, root: Path, commit: str, filter: typing.Optional[Callable[[str], bool]], executer: Executer):
         self.root = root
         self.commit = commit
-        self.dist_path = dist_path
-        self.section_printer = section_printer
+        self.filter = filter
         self.executer = executer
-        self.cmake_generator = cmake_generator
-
-        self.artifacts: dict[str, Path] = {}
 
 
-    @property
-    def dry(self) -> bool:
-        return self.executer.dry
-
-    def prepare(self):
-        logger.debug("Creating dist folder")
-        self.dist_path.mkdir(parents=True, exist_ok=True)
+    def get_archive_file_tree(self) -> ArchiveFileTree:
+        git_archive_args = ["git", "archive", "--format=tar.gz", self.commit, "-o", "/dev/stdout"]
+        logger.info("Executing args=%r", git_archive_args)
+        contents_tgz = subprocess.check_output(git_archive_args, cwd=self.root, text=False)
+        tar_archive = tarfile.open(fileobj=io.BytesIO(contents_tgz), mode="r:gz")
+        filenames = tuple(m.name for m in tar_archive if (m.isfile() or m.issym()))
+
+        file_times = self._get_file_times(paths=filenames)
+        git_contents = ArchiveFileTree()
+        for ti in tar_archive:
+            if self.filter and not self.filter(ti.name):
+                continue
+            data = None
+            symtarget = None
+            directory = False
+            file_time = None
+            if ti.isfile():
+                contents_file = tar_archive.extractfile(ti.name)
+                data = contents_file.read()
+                file_time = file_times[ti.name]
+            elif ti.issym():
+                symtarget = ti.linkname
+                file_time = file_times[ti.name]
+            elif ti.isdir():
+                directory = True
+            else:
+                raise ValueError(f"{ti.name}: unknown type")
+            node = NodeInArchive(arcpath=ti.name, data=data, mode=ti.mode, symtarget=symtarget, time=file_time, directory=directory)
+            git_contents.add_file(node)
+        return git_contents
 
 
-    TreeItem = collections.namedtuple("TreeItem", ("path", "mode", "data", "time"))
     def _get_file_times(self, paths: tuple[str, ...]) -> dict[str, datetime.datetime]:
         dry_out = textwrap.dedent("""\
             time=2024-03-14T15:40:25-07:00
 
 
             M\tCMakeLists.txt
         """)
         """)
-        git_log_out = self.executer.run(["git", "log", "--name-status", '--pretty=time=%cI', self.commit], stdout=True, dry_out=dry_out).stdout.splitlines(keepends=False)
+        git_log_out = self.executer.check_output(["git", "log", "--name-status", '--pretty=time=%cI', self.commit], dry_out=dry_out, cwd=self.root).splitlines(keepends=False)
         current_time = None
         set_paths = set(paths)
         path_times: dict[str, datetime.datetime] = {}
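Much of the new machinery above feeds ArchiveFileTree.add_file_mapping(), which expands @<@KEY@>@ placeholders via configure_text()/configure_file() when instantiating the *.in templates under build-scripts/pkg-support/. A small illustration of how that substitution behaves, with made-up context values:

# Assumes configure_text() from this script; the context values are illustrative.
context = {"PROJECT_NAME": "SDL3", "PROJECT_VERSION": "3.2.0"}
print(configure_text("@<@PROJECT_NAME@>@-@<@PROJECT_VERSION@>@", context=context))  # SDL3-3.2.0

# Any placeholder left unresolved raises, which catches typos in the templates early.
try:
    configure_text("@<@PROJECT_LICENSE@>@", context=context)
except ValueError as error:
    print(error)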
@@ -175,103 +497,159 @@ class Releaser:
             if not line:
                 continue
             if line.startswith("time="):
-                current_time = datetime.datetime.fromisoformat(line.removeprefix("time="))
+                current_time = safe_isotime_to_datetime(line.removeprefix("time="))
                 continue
             mod_type, file_paths = line.split(maxsplit=1)
             assert current_time is not None
             for file_path in file_paths.split("\t"):
                 if file_path in set_paths and file_path not in path_times:
                     path_times[file_path] = current_time
-        assert set(path_times.keys()) == set_paths
+
+        # FIXME: find out why some files are not shown in "git log"
+        # assert set(path_times.keys()) == set_paths
+        if set(path_times.keys()) != set_paths:
+            found_times = set(path_times.keys())
+            paths_without_times = set_paths.difference(found_times)
+            logger.warning("No times found for these paths: %s", paths_without_times)
+            max_time = max(time for time in path_times.values())
+            for path in paths_without_times:
+                path_times[path] = max_time
+
         return path_times
         return path_times
 
 
-    @staticmethod
-    def _path_filter(path: str):
+
+class Releaser:
+    def __init__(self, release_info: dict, commit: str, revision: str, root: Path, dist_path: Path, section_printer: SectionPrinter, executer: Executer, cmake_generator: str, deps_path: Path, overwrite: bool, github: bool, fast: bool):
+        self.release_info = release_info
+        self.project = release_info["name"]
+        self.version = self.extract_sdl_version(root=root, release_info=release_info)
+        self.root = root
+        self.commit = commit
+        self.revision = revision
+        self.dist_path = dist_path
+        self.section_printer = section_printer
+        self.executer = executer
+        self.cmake_generator = cmake_generator
+        self.cpu_count = multiprocessing.cpu_count()
+        self.deps_path = deps_path
+        self.overwrite = overwrite
+        self.github = github
+        self.fast = fast
+        self.arc_time = datetime.datetime.now()
+
+        self.artifacts: dict[str, Path] = {}
+
+    def get_context(self, extra_context: typing.Optional[dict[str, str]]=None) -> dict[str, str]:
+        ctx = {
+            "PROJECT_NAME": self.project,
+            "PROJECT_VERSION": self.version,
+            "PROJECT_COMMIT": self.commit,
+            "PROJECT_REVISION": self.revision,
+        }
+        if extra_context:
+            ctx.update(extra_context)
+        return ctx
+
+    @property
+    def dry(self) -> bool:
+        return self.executer.dry
+
+    def prepare(self):
+        logger.debug("Creating dist folder")
+        self.dist_path.mkdir(parents=True, exist_ok=True)
+
+    @classmethod
+    def _path_filter(cls, path: str) -> bool:
+        if ".gitmodules" in path:
+            return True
         if path.startswith(".git"):
         if path.startswith(".git"):
             return False
             return False
         return True
         return True
 
 
-    def _get_git_contents(self) -> dict[str, TreeItem]:
-        contents_tgz = subprocess.check_output(["git", "archive", "--format=tar.gz", self.commit, "-o", "/dev/stdout"], text=False)
-        contents = tarfile.open(fileobj=io.BytesIO(contents_tgz), mode="r:gz")
-        filenames = tuple(m.name for m in contents if m.isfile())
-        assert "src/SDL.c" in filenames
-        assert "include/SDL3/SDL.h" in filenames
-        file_times = self._get_file_times(filenames)
-        git_contents = {}
-        for ti in contents:
-            if not ti.isfile():
-                continue
-            if not self._path_filter(ti.name):
-                continue
-            contents_file = contents.extractfile(ti.name)
-            assert contents_file, f"{ti.name} is not a file"
-            git_contents[ti.name] = self.TreeItem(path=ti.name, mode=ti.mode, data=contents_file.read(), time=file_times[ti.name])
-        return git_contents
+    @classmethod
+    def _external_repo_path_filter(cls, path: str) -> bool:
+        if not cls._path_filter(path):
+            return False
+        if path.startswith("test/") or path.startswith("tests/"):
+            return False
+        return True
 
 
     def create_source_archives(self) -> None:
     def create_source_archives(self) -> None:
+        source_collector = SourceCollector(root=self.root, commit=self.commit, executer=self.executer, filter=self._path_filter)
+        print(f"Collecting sources of {self.project}...")
+        archive_tree = source_collector.get_archive_file_tree()
+        latest_mod_time = archive_tree.get_latest_mod_time()
+        archive_tree.add_file(NodeInArchive.from_text(arcpath=REVISION_TXT, text=f"{self.revision}\n", time=latest_mod_time))
+        archive_tree.add_file(NodeInArchive.from_text(arcpath=f"{GIT_HASH_FILENAME}", text=f"{self.commit}\n", time=latest_mod_time))
+        archive_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["source"].get("files", {}), file_mapping_root=self.root, context=self.get_context(), time=latest_mod_time)
+
         archive_base = f"{self.project}-{self.version}"
+        zip_path = self.dist_path / f"{archive_base}.zip"
+        tgz_path = self.dist_path / f"{archive_base}.tar.gz"
+        txz_path = self.dist_path / f"{archive_base}.tar.xz"
 
 
-        git_contents = self._get_git_contents()
-        git_files = list(git_contents.values())
-        assert len(git_contents) == len(git_files)
+        logger.info("Creating zip/tgz/txz source archives ...")
+        if self.dry:
+            zip_path.touch()
+            tgz_path.touch()
+            txz_path.touch()
+        else:
+            with Archiver(zip_path=zip_path, tgz_path=tgz_path, txz_path=txz_path) as archiver:
+                print(f"Adding source files of {self.project}...")
+                archive_tree.add_to_archiver(archive_base=archive_base, archiver=archiver)
 
 
-        latest_mod_time = max(item.time for item in git_files)
+                for extra_repo in self.release_info["source"].get("extra-repos", []):
+                    extra_repo_root = self.root / extra_repo
+                    assert (extra_repo_root / ".git").exists(), f"{extra_repo_root} must be a git repo"
+                    extra_repo_commit = self.executer.check_output(["git", "rev-parse", "HEAD"], dry_out=f"gitsha-extra-repo-{extra_repo}", cwd=extra_repo_root).strip()
+                    extra_repo_source_collector = SourceCollector(root=extra_repo_root, commit=extra_repo_commit, executer=self.executer, filter=self._external_repo_path_filter)
+                    print(f"Collecting sources of {extra_repo} ...")
+                    extra_repo_archive_tree = extra_repo_source_collector.get_archive_file_tree()
+                    print(f"Adding source files of {extra_repo} ...")
+                    extra_repo_archive_tree.add_to_archiver(archive_base=f"{archive_base}/{extra_repo}", archiver=archiver)
 
 
-        git_files.append(self.TreeItem(path="VERSION.txt", data=f"{self.version}\n".encode(), mode=0o100644, time=latest_mod_time))
-        git_files.append(self.TreeItem(path=GIT_HASH_FILENAME, data=f"{self.commit}\n".encode(), mode=0o100644, time=latest_mod_time))
+            for file in self.release_info["source"]["checks"]:
+                assert f"{archive_base}/{file}" in archiver.added_files, f"'{archive_base}/{file}' must exist"
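Note on the "source" section consumed above: the keys are exactly the ones looked up in this method ("files", "extra-repos", "checks"); the values below are hypothetical and only illustrate the expected shape.

    # Illustrative sketch, not taken from release-info.json.
    source_release_info = {
        "checks": ["CMakeLists.txt"],              # every source archive must contain <archive_base>/<path>
        "files": {"": ["path/to/template.h.in"]},  # archive dir -> source files/globs, templated via get_context()
        "extra-repos": ["external/example"],       # nested git checkouts archived under the same root
    }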
 
 
-        git_files.sort(key=lambda v: v.time)
+        logger.info("... done")
 
 
-        zip_path = self.dist_path / f"{archive_base}.zip"
-        logger.info("Creating .zip source archive (%s)...", zip_path)
-        if self.dry:
-            zip_path.touch()
-        else:
-            with zipfile.ZipFile(zip_path, "w", compression=zipfile.ZIP_DEFLATED) as zip_object:
-                for git_file in git_files:
-                    file_data_time = (git_file.time.year, git_file.time.month, git_file.time.day, git_file.time.hour, git_file.time.minute, git_file.time.second)
-                    zip_info = zipfile.ZipInfo(filename=f"{archive_base}/{git_file.path}", date_time=file_data_time)
-                    zip_info.external_attr = git_file.mode << 16
-                    zip_info.compress_type = zipfile.ZIP_DEFLATED
-                    zip_object.writestr(zip_info, data=git_file.data)
         self.artifacts["src-zip"] = zip_path
         self.artifacts["src-zip"] = zip_path
+        self.artifacts["src-tar-gz"] = tgz_path
+        self.artifacts["src-tar-xz"] = txz_path
 
 
-        tar_types = (
-            (".tar.gz", "gz"),
-            (".tar.xz", "xz"),
-        )
-        for ext, comp in tar_types:
-            tar_path = self.dist_path / f"{archive_base}{ext}"
-            logger.info("Creating %s source archive (%s)...", ext, tar_path)
-            if self.dry:
-                tar_path.touch()
-            else:
-                with tarfile.open(tar_path, f"w:{comp}") as tar_object:
-                    for git_file in git_files:
-                        tar_info = tarfile.TarInfo(f"{archive_base}/{git_file.path}")
-                        tar_info.mode = git_file.mode
-                        tar_info.size = len(git_file.data)
-                        tar_info.mtime = git_file.time.timestamp()
-                        tar_object.addfile(tar_info, fileobj=io.BytesIO(git_file.data))
-
-            if tar_path.suffix == ".gz":
+        if not self.dry:
+            with tgz_path.open("r+b") as f:
                 # Zero the embedded timestamp in the gzip'ed tarball
                 # Zero the embedded timestamp in the gzip'ed tarball
-                with open(tar_path, "r+b") as f:
-                    f.seek(4, 0)
-                    f.write(b"\x00\x00\x00\x00")
-
-            self.artifacts[f"src-tar-{comp}"] = tar_path
-
-    def create_xcframework(self, configuration: str="Release") -> None:
-        dmg_in = self.root / f"Xcode/SDL/build/{self.project}.dmg"
-        dmg_in.unlink(missing_ok=True)
-        self.executer.run(["xcodebuild", "-project", str(self.root / "Xcode/SDL/SDL.xcodeproj"), "-target", "SDL3.dmg", "-configuration", configuration])
+                f.seek(4, 0)
+                f.write(b"\x00\x00\x00\x00")
+
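The seek(4) above targets the MTIME field of the gzip header: per RFC 1952, bytes 4-7 of a gzip stream hold a little-endian modification time, and zeroing it keeps the .tar.gz byte-for-byte reproducible. A minimal, self-contained check (gzip_mtime is a hypothetical helper, not part of this script):

    import struct

    def gzip_mtime(path: str) -> int:
        """Return the 32-bit MTIME stored in a gzip header (0 means 'not set')."""
        with open(path, "rb") as f:
            header = f.read(10)  # the fixed gzip header is 10 bytes
        assert header[:2] == b"\x1f\x8b", "not a gzip stream"
        return struct.unpack("<I", header[4:8])[0]

    # After the zeroing step above, gzip_mtime(".../SDL3-X.Y.Z.tar.gz") should return 0.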
+    def create_dmg(self, configuration: str="Release") -> None:
+        dmg_in = self.root / self.release_info["dmg"]["path"]
+        xcode_project = self.root / self.release_info["dmg"]["project"]
+        assert xcode_project.is_dir(), f"{xcode_project} must be a directory"
+        assert (xcode_project / "project.pbxproj").is_file(), f"{xcode_project} must contain project.pbxproj"
+        if not self.fast:
+            dmg_in.unlink(missing_ok=True)
+        build_xcconfig = self.release_info["dmg"].get("build-xcconfig")
+        if build_xcconfig:
+            shutil.copy(self.root / build_xcconfig, xcode_project.parent / "build.xcconfig")
+
+        xcode_scheme = self.release_info["dmg"].get("scheme")
+        xcode_target = self.release_info["dmg"].get("target")
+        assert xcode_scheme or xcode_target, "dmg needs scheme or target"
+        assert not (xcode_scheme and xcode_target), "dmg cannot have both scheme and target set"
+        if xcode_scheme:
+            scheme_or_target = "-scheme"
+            target_like = xcode_scheme
+        else:
+            scheme_or_target = "-target"
+            target_like = xcode_target
+        self.executer.run(["xcodebuild", "ONLY_ACTIVE_ARCH=NO", "-project", xcode_project, scheme_or_target, target_like, "-configuration", configuration])
         if self.dry:
         if self.dry:
             dmg_in.parent.mkdir(parents=True, exist_ok=True)
             dmg_in.parent.mkdir(parents=True, exist_ok=True)
             dmg_in.touch()
             dmg_in.touch()
 
 
-        assert dmg_in.is_file(), "SDL3.dmg was not created by xcodebuild"
+        assert dmg_in.is_file(), f"{self.project}.dmg was not created by xcodebuild"
 
 
         dmg_out = self.dist_path / f"{self.project}-{self.version}.dmg"
         dmg_out = self.dist_path / f"{self.project}-{self.version}.dmg"
         shutil.copy(dmg_in, dmg_out)
         shutil.copy(dmg_in, dmg_out)
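For orientation, the "dmg" block read by create_dmg can be pictured as follows. The keys mirror the lookups above, and the values echo the hard-coded paths from the removed code; an optional "build-xcconfig" key points at an xcconfig file that is copied next to the project, and exactly one of "scheme"/"target" may be set.

    # Illustrative sketch; values are an educated guess, not taken from release-info.json.
    dmg_release_info = {
        "project": "Xcode/SDL/SDL.xcodeproj",  # directory containing project.pbxproj
        "path": "Xcode/SDL/build/SDL3.dmg",    # where xcodebuild drops the disk image
        "target": "SDL3.dmg",                  # or "scheme": "...", never both
    }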
@@ -281,271 +659,169 @@ class Releaser:
     def git_hash_data(self) -> bytes:
     def git_hash_data(self) -> bytes:
         return f"{self.commit}\n".encode()
         return f"{self.commit}\n".encode()
 
 
-    def _tar_add_git_hash(self, tar_object: tarfile.TarFile, root: typing.Optional[str]=None, time: typing.Optional[datetime.datetime]=None):
-        if not time:
-            time = datetime.datetime(year=2024, month=4, day=1)
-        path = GIT_HASH_FILENAME
-        if root:
-            path = f"{root}/{path}"
-
-        tar_info = tarfile.TarInfo(path)
-        tar_info.mode = 0o100644
-        tar_info.size = len(self.git_hash_data)
-        tar_info.mtime = int(time.timestamp())
-        tar_object.addfile(tar_info, fileobj=io.BytesIO(self.git_hash_data))
-
-    def _zip_add_git_hash(self, zip_file: zipfile.ZipFile, root: typing.Optional[str]=None, time: typing.Optional[datetime.datetime]=None):
-        if not time:
-            time = datetime.datetime(year=2024, month=4, day=1)
-        path = GIT_HASH_FILENAME
-        if root:
-            path = f"{root}/{path}"
-
-        file_data_time = (time.year, time.month, time.day, time.hour, time.minute, time.second)
-        zip_info = zipfile.ZipInfo(filename=path, date_time=file_data_time)
-        zip_info.external_attr = 0o100644 << 16
-        zip_info.compress_type = zipfile.ZIP_DEFLATED
-        zip_file.writestr(zip_info, data=self.git_hash_data)
-
     def create_mingw_archives(self) -> None:
     def create_mingw_archives(self) -> None:
         build_type = "Release"
         build_type = "Release"
-        mingw_archs = ("i686", "x86_64")
         build_parent_dir = self.root / "build-mingw"
         build_parent_dir = self.root / "build-mingw"
-
+        ARCH_TO_GNU_ARCH = {
+            # "arm64": "aarch64",
+            "x86": "i686",
+            "x64": "x86_64",
+        }
+        ARCH_TO_TRIPLET = {
+            # "arm64": "aarch64-w64-mingw32",
+            "x86": "i686-w64-mingw32",
+            "x64": "x86_64-w64-mingw32",
+        }
+
+        new_env = dict(os.environ)
+
+        cmake_prefix_paths = []
+        mingw_deps_path = self.deps_path / "mingw-deps"
+
+        if "dependencies" in self.release_info["mingw"]:
+            shutil.rmtree(mingw_deps_path, ignore_errors=True)
+            mingw_deps_path.mkdir()
+
+            for triplet in ARCH_TO_TRIPLET.values():
+                (mingw_deps_path / triplet).mkdir()
+
+            def extract_filter(member: tarfile.TarInfo, path: str, /):
+                if member.name.startswith("SDL"):
+                    member.name = "/".join(Path(member.name).parts[1:])
+                return member
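extract_filter relies on the tarfile extraction-filter hook (added in Python 3.12 and backported to recent 3.8-3.11 patch releases): the callable receives each TarInfo plus the destination path and returns the member to extract, or None to skip it. Here it strips the release's top-level "SDL*" directory; a standalone sketch of the same transformation (the archive entry below is hypothetical):

    import tarfile
    from pathlib import Path

    def strip_top_dir(member: tarfile.TarInfo, path: str, /) -> tarfile.TarInfo:
        # Same idea as extract_filter above: drop the leading top-level directory.
        if member.name.startswith("SDL"):
            member.name = "/".join(Path(member.name).parts[1:])
        return member

    m = tarfile.TarInfo("SDL3_image-3.2.0/include/SDL3_image/SDL_image.h")
    assert strip_top_dir(m, ".").name == "include/SDL3_image/SDL_image.h"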
+            for dep in self.release_info.get("dependencies", {}):
+                extract_path = mingw_deps_path / f"extract-{dep}"
+                extract_path.mkdir()
+                with chdir(extract_path):
+                    tar_path = self.deps_path / glob.glob(self.release_info["mingw"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)[0]
+                    logger.info("Extracting %s to %s", tar_path, mingw_deps_path)
+                    assert tar_path.suffix in (".gz", ".xz")
+                    with tarfile.open(tar_path, mode=f"r:{tar_path.suffix.strip('.')}") as tarf:
+                        tarf.extractall(filter=extract_filter)
+                    for arch, triplet in ARCH_TO_TRIPLET.items():
+                        install_cmd = self.release_info["mingw"]["dependencies"][dep]["install-command"]
+                        extra_configure_data = {
+                            "ARCH": ARCH_TO_GNU_ARCH[arch],
+                            "TRIPLET": triplet,
+                            "PREFIX": str(mingw_deps_path / triplet),
+                        }
+                        install_cmd = configure_text(install_cmd, context=self.get_context(extra_configure_data))
+                        self.executer.run(shlex.split(install_cmd), cwd=str(extract_path))
+
+            dep_binpath = mingw_deps_path / triplet / "bin"
+            assert dep_binpath.is_dir(), f"{dep_binpath} for PATH should exist"
+            dep_pkgconfig = mingw_deps_path / triplet / "lib/pkgconfig"
+            assert dep_pkgconfig.is_dir(), f"{dep_pkgconfig} for PKG_CONFIG_PATH should exist"
+
+            new_env["PATH"] = os.pathsep.join([str(dep_binpath), new_env["PATH"]])
+            new_env["PKG_CONFIG_PATH"] = str(dep_pkgconfig)
+            cmake_prefix_paths.append(mingw_deps_path)
+
+        new_env["CFLAGS"] = f"-O2 -ffile-prefix-map={self.root}=/src/{self.project}"
+        new_env["CXXFLAGS"] = f"-O2 -ffile-prefix-map={self.root}=/src/{self.project}"
+
+        assert any(system in self.release_info["mingw"] for system in ("autotools", "cmake"))
+        assert not all(system in self.release_info["mingw"] for system in ("autotools", "cmake"))
+
+        mingw_archs = set()
+        arc_root = f"{self.project}-{self.version}"
+        archive_file_tree = ArchiveFileTree()
+
+        if "autotools" in self.release_info["mingw"]:
+            for arch in self.release_info["mingw"]["autotools"]["archs"]:
+                triplet = ARCH_TO_TRIPLET[arch]
+                new_env["CC"] = f"{triplet}-gcc"
+                new_env["CXX"] = f"{triplet}-g++"
+                new_env["RC"] = f"{triplet}-windres"
+
+                assert arch not in mingw_archs
+                mingw_archs.add(arch)
+
+                build_path = build_parent_dir / f"build-{triplet}"
+                install_path = build_parent_dir / f"install-{triplet}"
+                shutil.rmtree(install_path, ignore_errors=True)
+                build_path.mkdir(parents=True, exist_ok=True)
+                with self.section_printer.group(f"Configuring MinGW {triplet} (autotools)"):
+                    extra_args = [arg.replace("@DEP_PREFIX@", str(mingw_deps_path / triplet)) for arg in self.release_info["mingw"]["autotools"]["args"]]
+                    assert "@" not in " ".join(extra_args), f"@ should not be present in extra arguments ({extra_args})"
+                    self.executer.run([
+                        self.root / "configure",
+                        f"--prefix={install_path}",
+                        f"--includedir={install_path}/include",
+                        f"--libdir={install_path}/lib",
+                        f"--bindir={install_path}/bin",
+                        f"--host={triplet}",
+                        f"--build=x86_64-none-linux-gnu",
+                    ] + extra_args, cwd=build_path, env=new_env)
+                with self.section_printer.group(f"Build MinGW {triplet} (autotools)"):
+                    self.executer.run(["make", f"-j{self.cpu_count}"], cwd=build_path, env=new_env)
+                with self.section_printer.group(f"Install MinGW {triplet} (autotools)"):
+                    self.executer.run(["make", "install"], cwd=build_path, env=new_env)
+                archive_file_tree.add_directory_tree(arc_dir=arc_join(arc_root, triplet), path=install_path)
+
+        if "cmake" in self.release_info["mingw"]:
+            assert self.release_info["mingw"]["cmake"]["shared-static"] in ("args", "both")
+            for arch in self.release_info["mingw"]["cmake"]["archs"]:
+                triplet = ARCH_TO_TRIPLET[arch]
+                new_env["CC"] = f"{triplet}-gcc"
+                new_env["CXX"] = f"{triplet}-g++"
+                new_env["RC"] = f"{triplet}-windres"
+
+                assert arch not in mingw_archs
+                mingw_archs.add(arch)
+
+                build_path = build_parent_dir / f"build-{triplet}"
+                install_path = build_parent_dir / f"install-{triplet}"
+                shutil.rmtree(install_path, ignore_errors=True)
+                build_path.mkdir(parents=True, exist_ok=True)
+                if self.release_info["mingw"]["cmake"]["shared-static"] == "args":
+                    args_for_shared_static = ([], )
+                elif self.release_info["mingw"]["cmake"]["shared-static"] == "both":
+                    args_for_shared_static = (["-DBUILD_SHARED_LIBS=ON"], ["-DBUILD_SHARED_LIBS=OFF"])
+                for arg_for_shared_static in args_for_shared_static:
+                    with self.section_printer.group(f"Configuring MinGW {triplet} (CMake)"):
+                        extra_args = [arg.replace("@DEP_PREFIX@", str(mingw_deps_path / triplet)) for arg in self.release_info["mingw"]["cmake"]["args"]]
+                        assert "@" not in " ".join(extra_args), f"@ should not be present in extra arguments ({extra_args})"
+                        self.executer.run([
+                            f"cmake",
+                            f"-S", str(self.root), "-B", str(build_path),
+                            f"-DCMAKE_BUILD_TYPE={build_type}",
+                            f'''-DCMAKE_C_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''',
+                            f'''-DCMAKE_CXX_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''',
+                            f"-DCMAKE_PREFIX_PATH={mingw_deps_path / triplet}",
+                            f"-DCMAKE_INSTALL_PREFIX={install_path}",
+                            f"-DCMAKE_INSTALL_INCLUDEDIR=include",
+                            f"-DCMAKE_INSTALL_LIBDIR=lib",
+                            f"-DCMAKE_INSTALL_BINDIR=bin",
+                            f"-DCMAKE_INSTALL_DATAROOTDIR=share",
+                            f"-DCMAKE_TOOLCHAIN_FILE={self.root}/build-scripts/cmake-toolchain-mingw64-{ARCH_TO_GNU_ARCH[arch]}.cmake",
+                            f"-G{self.cmake_generator}",
+                        ] + extra_args + ([] if self.fast else ["--fresh"]) + arg_for_shared_static, cwd=build_path, env=new_env)
+                    with self.section_printer.group(f"Build MinGW {triplet} (CMake)"):
+                        self.executer.run(["cmake", "--build", str(build_path), "--verbose", "--config", build_type], cwd=build_path, env=new_env)
+                    with self.section_printer.group(f"Install MinGW {triplet} (CMake)"):
+                        self.executer.run(["cmake", "--install", str(build_path)], cwd=build_path, env=new_env)
+                archive_file_tree.add_directory_tree(arc_dir=arc_join(arc_root, triplet), path=install_path, time=self.arc_time)
+
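The CMake branch above is driven by a "mingw" -> "cmake" block whose shape can be sketched like this; the key names match the lookups, the values are illustrative only:

    # Illustrative sketch, not taken from release-info.json.
    mingw_cmake_info = {
        "archs": ["x86", "x64"],            # keys of ARCH_TO_TRIPLET above
        "shared-static": "both",            # "both": configure twice with BUILD_SHARED_LIBS ON/OFF; "args": leave it to "args"
        "args": ["-DSDL_TEST_LIBRARY=ON"],  # hypothetical; "@DEP_PREFIX@" expands to the per-triplet dependency prefix
    }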
+        print("Recording extra files for MinGW development archive ...")
+        archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["mingw"].get("files", {}), file_mapping_root=self.root, context=self.get_context(), time=self.arc_time)
+        print("... done")
+
+        print("Creating zip/tgz/txz development archives ...")
         zip_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.zip"
         zip_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.zip"
-        tar_exts = ("gz", "xz")
-        tar_paths = { ext: self.dist_path / f"{self.project}-devel-{self.version}-mingw.tar.{ext}" for ext in tar_exts}
-
-        arch_install_paths = {}
-        arch_files = {}
-
-        for arch in mingw_archs:
-            build_path = build_parent_dir / f"build-{arch}"
-            install_path = build_parent_dir / f"install-{arch}"
-            arch_install_paths[arch] = install_path
-            shutil.rmtree(install_path, ignore_errors=True)
-            build_path.mkdir(parents=True, exist_ok=True)
-            with self.section_printer.group(f"Configuring MinGW {arch}"):
-                self.executer.run([
-                    "cmake", "-S", str(self.root), "-B", str(build_path),
-                    "--fresh",
-                    f'''-DCMAKE_C_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''',
-                    f'''-DCMAKE_CXX_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''',
-                    "-DSDL_SHARED=ON",
-                    "-DSDL_STATIC=ON",
-                    "-DSDL_DISABLE_INSTALL_DOCS=ON",
-                    "-DSDL_TEST_LIBRARY=ON",
-                    "-DSDL_TESTS=OFF",
-                    "-DCMAKE_INSTALL_BINDIR=bin",
-                    "-DCMAKE_INSTALL_DATAROOTDIR=share",
-                    "-DCMAKE_INSTALL_INCLUDEDIR=include",
-                    "-DCMAKE_INSTALL_LIBDIR=lib",
-                    f"-DCMAKE_BUILD_TYPE={build_type}",
-                    f"-DCMAKE_TOOLCHAIN_FILE={self.root}/build-scripts/cmake-toolchain-mingw64-{arch}.cmake",
-                    f"-G{self.cmake_generator}",
-                    f"-DCMAKE_INSTALL_PREFIX={install_path}",
-                ])
-            with self.section_printer.group(f"Build MinGW {arch}"):
-                self.executer.run(["cmake", "--build", str(build_path), "--verbose", "--config", build_type])
-            with self.section_printer.group(f"Install MinGW {arch}"):
-                self.executer.run(["cmake", "--install", str(build_path), "--strip", "--config", build_type])
-            arch_files[arch] = list(Path(r) / f for r, _, files in os.walk(install_path) for f in files)
-
-        extra_files = (
-            ("build-scripts/pkg-support/mingw/INSTALL.txt", ""),
-            ("build-scripts/pkg-support/mingw/Makefile", ""),
-            ("build-scripts/pkg-support/mingw/cmake/SDL3Config.cmake", "cmake/"),
-            ("build-scripts/pkg-support/mingw/cmake/SDL3ConfigVersion.cmake", "cmake/"),
-            ("BUGS.txt", ""),
-            ("CREDITS.md", ""),
-            ("README-SDL.txt", ""),
-            ("WhatsNew.txt", ""),
-            ("LICENSE.txt", ""),
-            ("README.md", ""),
-            ("docs/*", "docs/"),
-        )
-        test_files = list(Path(r) / f for r, _, files in os.walk(self.root / "test") for f in files)
-
-        # FIXME: split SDL3.dll debug information into debug library
-        # objcopy --only-keep-debug SDL3.dll SDL3.debug.dll
-        # objcopy --add-gnu-debuglink=SDL3.debug.dll SDL3.dll
-        # objcopy --strip-debug SDL3.dll
-
-        for comp in tar_exts:
-            logger.info("Creating %s...", tar_paths[comp])
-            with tarfile.open(tar_paths[comp], f"w:{comp}") as tar_object:
-                arc_root = f"{self.project}-{self.version}"
-                for file_path_glob, arcdirname in extra_files:
-                    assert not arcdirname or arcdirname[-1] == "/"
-                    for file_path in glob.glob(file_path_glob, root_dir=self.root):
-                        file_path = self.root / file_path
-                        arcname = f"{arc_root}/{arcdirname}{Path(file_path).name}"
-                        tar_object.add(file_path, arcname=arcname)
-                for arch in mingw_archs:
-                    install_path = arch_install_paths[arch]
-                    arcname_parent = f"{arc_root}/{arch}-w64-mingw32"
-                    for file in arch_files[arch]:
-                        arcname = os.path.join(arcname_parent, file.relative_to(install_path))
-                        tar_object.add(file, arcname=arcname)
-                for test_file in test_files:
-                    arcname = f"{arc_root}/test/{test_file.relative_to(self.root/'test')}"
-                    tar_object.add(test_file, arcname=arcname)
-                self._tar_add_git_hash(tar_object=tar_object, root=arc_root)
-
-                self.artifacts[f"mingw-devel-tar-{comp}"] = tar_paths[comp]
-
-    def build_vs(self, arch: str, platform: str, vs: VisualStudio, configuration: str="Release") -> VcArchDevel:
-        dll_path = self.root / f"VisualC/SDL/{platform}/{configuration}/{self.project}.dll"
-        pdb_path = self.root / f"VisualC/SDL/{platform}/{configuration}/{self.project}.pdb"
-        imp_path = self.root / f"VisualC/SDL/{platform}/{configuration}/{self.project}.lib"
-        test_path = self.root / f"VisualC/SDL_test/{platform}/{configuration}/{self.project}_test.lib"
-
-        dll_path.unlink(missing_ok=True)
-        pdb_path.unlink(missing_ok=True)
-        imp_path.unlink(missing_ok=True)
-        test_path.unlink(missing_ok=True)
-
-        projects = [
-            self.root / "VisualC/SDL/SDL.vcxproj",
-            self.root / "VisualC/SDL_test/SDL_test.vcxproj",
-        ]
-
-        with self.section_printer.group(f"Build {arch} VS binary"):
-            vs.build(arch=arch, platform=platform, configuration=configuration, projects=projects)
-
-        if self.dry:
-            dll_path.parent.mkdir(parents=True, exist_ok=True)
-            dll_path.touch()
-            pdb_path.touch()
-            imp_path.touch()
-            test_path.parent.mkdir(parents=True, exist_ok=True)
-            test_path.touch()
-
-        assert dll_path.is_file(), "SDL3.dll has not been created"
-        assert pdb_path.is_file(), "SDL3.pdb has not been created"
-        assert imp_path.is_file(), "SDL3.lib has not been created"
-        assert test_path.is_file(), "SDL3_test.lib has not been created"
-
-        zip_path = self.dist_path / f"{self.project}-{self.version}-win32-{arch}.zip"
-        zip_path.unlink(missing_ok=True)
-        logger.info("Creating %s", zip_path)
-        with zipfile.ZipFile(zip_path, mode="w", compression=zipfile.ZIP_DEFLATED) as zf:
-            logger.debug("Adding %s", dll_path.name)
-            zf.write(dll_path, arcname=dll_path.name)
-            logger.debug("Adding %s", "README-SDL.txt")
-            zf.write(self.root / "README-SDL.txt", arcname="README-SDL.txt")
-            self._zip_add_git_hash(zip_file=zf)
-        self.artifacts[f"VC-{arch}"] = zip_path
-
-        return VcArchDevel(dll=dll_path, pdb=pdb_path, imp=imp_path, test=test_path)
-
-    def build_vs_cmake(self, arch: str, arch_cmake: str) -> VcArchDevel:
-        build_path = self.root / f"build-vs-{arch}"
-        install_path = build_path / "prefix"
-        dll_path = install_path / f"bin/{self.project}.dll"
-        pdb_path = install_path / f"bin/{self.project}.pdb"
-        imp_path = install_path / f"lib/{self.project}.lib"
-        test_path = install_path / f"lib/{self.project}_test.lib"
-
-        dll_path.unlink(missing_ok=True)
-        pdb_path.unlink(missing_ok=True)
-        imp_path.unlink(missing_ok=True)
-        test_path.unlink(missing_ok=True)
-
-        build_type = "Release"
-
-        shutil.rmtree(install_path, ignore_errors=True)
-        build_path.mkdir(parents=True, exist_ok=True)
-        with self.section_printer.group(f"Configure VC CMake project for {arch}"):
-            self.executer.run([
-                "cmake", "-S", str(self.root), "-B", str(build_path),
-                "--fresh",
-                "-A", arch_cmake,
-                "-DSDL_SHARED=ON",
-                "-DSDL_STATIC=OFF",
-                "-DSDL_DISABLE_INSTALL_DOCS=ON",
-                "-DSDL_TEST_LIBRARY=ON",
-                "-DSDL_TESTS=OFF",
-                "-DCMAKE_INSTALL_BINDIR=bin",
-                "-DCMAKE_INSTALL_DATAROOTDIR=share",
-                "-DCMAKE_INSTALL_INCLUDEDIR=include",
-                "-DCMAKE_INSTALL_LIBDIR=lib",
-                f"-DCMAKE_BUILD_TYPE={build_type}",
-                f"-DCMAKE_INSTALL_PREFIX={install_path}",
-                # MSVC debug information format flags are selected by an abstraction
-                "-DCMAKE_POLICY_DEFAULT_CMP0141=NEW",
-                # MSVC debug information format
-                "-DCMAKE_MSVC_DEBUG_INFORMATION_FORMAT=ProgramDatabase",
-                # Linker flags for executables
-                "-DCMAKE_EXE_LINKER_FLAGS=-DEBUG",
-                # Linker flag for shared libraries
-                "-DCMAKE_SHARED_LINKER_FLAGS=-INCREMENTAL:NO -DEBUG -OPT:REF -OPT:ICF",
-                # MSVC runtime library flags are selected by an abstraction
-                "-DCMAKE_POLICY_DEFAULT_CMP0091=NEW",
-                # Use statically linked runtime (-MT) (ideally, should be "MultiThreaded$<$<CONFIG:Debug>:Debug>")
-                "-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded",
-            ])
+        tgz_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.tar.gz"
+        txz_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.tar.xz"
 
 
-        with self.section_printer.group(f"Build VC CMake project for {arch}"):
-            self.executer.run(["cmake", "--build", str(build_path), "--verbose", "--config", build_type])
-        with self.section_printer.group(f"Install VC CMake project for {arch}"):
-            self.executer.run(["cmake", "--install", str(build_path), "--config", build_type])
+        with Archiver(zip_path=zip_path, tgz_path=tgz_path, txz_path=txz_path) as archiver:
+            archive_file_tree.add_to_archiver(archive_base="", archiver=archiver)
+            archiver.add_git_hash(arcdir=arc_root, commit=self.commit, time=self.arc_time)
+        print("... done")
 
 
-        assert dll_path.is_file(), "SDL3.dll has not been created"
-        assert pdb_path.is_file(), "SDL3.pdb has not been created"
-        assert imp_path.is_file(), "SDL3.lib has not been created"
-        assert test_path.is_file(), "SDL3_test.lib has not been created"
+        self.artifacts["mingw-devel-zip"] = zip_path
+        self.artifacts["mingw-devel-tar-gz"] = tgz_path
+        self.artifacts["mingw-devel-tar-xz"] = txz_path
 
 
-        zip_path = self.dist_path / f"{self.project}-{self.version}-win32-{arch}.zip"
-        zip_path.unlink(missing_ok=True)
-        logger.info("Creating %s", zip_path)
-        with zipfile.ZipFile(zip_path, mode="w", compression=zipfile.ZIP_DEFLATED) as zf:
-            logger.debug("Adding %s", dll_path.name)
-            zf.write(dll_path, arcname=dll_path.name)
-            logger.debug("Adding %s", "README-SDL.txt")
-            zf.write(self.root / "README-SDL.txt", arcname="README-SDL.txt")
-            self._zip_add_git_hash(zip_file=zf)
-        self.artifacts[f"VC-{arch}"] = zip_path
-
-        return VcArchDevel(dll=dll_path, pdb=pdb_path, imp=imp_path, test=test_path)
-
-    def build_vs_devel(self, arch_vc: dict[str, VcArchDevel]) -> None:
-        zip_path = self.dist_path / f"{self.project}-devel-{self.version}-VC.zip"
-        archive_prefix = f"{self.project}-{self.version}"
-
-        def zip_file(zf: zipfile.ZipFile, path: Path, arcrelpath: str):
-            arcname = f"{archive_prefix}/{arcrelpath}"
-            logger.debug("Adding %s to %s", path, arcname)
-            zf.write(path, arcname=arcname)
-
-        def zip_directory(zf: zipfile.ZipFile, directory: Path, arcrelpath: str):
-            for f in directory.iterdir():
-                if f.is_file():
-                    arcname = f"{archive_prefix}/{arcrelpath}/{f.name}"
-                    logger.debug("Adding %s to %s", f, arcname)
-                    zf.write(f, arcname=arcname)
-
-        with zipfile.ZipFile(zip_path, mode="w", compression=zipfile.ZIP_DEFLATED) as zf:
-            for arch, binaries in arch_vc.items():
-                zip_file(zf, path=binaries.dll, arcrelpath=f"lib/{arch}/{binaries.dll.name}")
-                zip_file(zf, path=binaries.imp, arcrelpath=f"lib/{arch}/{binaries.imp.name}")
-                zip_file(zf, path=binaries.pdb, arcrelpath=f"lib/{arch}/{binaries.pdb.name}")
-                zip_file(zf, path=binaries.test, arcrelpath=f"lib/{arch}/{binaries.test.name}")
-
-            zip_directory(zf, directory=self.root / "include/SDL3", arcrelpath="include/SDL3")
-            zip_directory(zf, directory=self.root / "docs", arcrelpath="docs")
-            zip_directory(zf, directory=self.root / "VisualC/pkg-support/cmake", arcrelpath="cmake")
-            zip_file(zf, path=self.root / "cmake/sdlcpu.cmake", arcrelpath="cmake/sdlcpu.cmake")
-
-            for txt in ("BUGS.txt", "README-SDL.txt", "WhatsNew.txt"):
-                zip_file(zf, path=self.root / txt, arcrelpath=txt)
-            zip_file(zf, path=self.root / "LICENSE.txt", arcrelpath="COPYING.txt")
-            zip_file(zf, path=self.root / "README.md", arcrelpath="README.txt")
-
-            self._zip_add_git_hash(zip_file=zf, root=archive_prefix)
-        self.artifacts["VC-devel"] = zip_path
-
-    def detect_android_api(self, android_home: str) -> typing.Optional[int]:
+    def _detect_android_api(self, android_home: str) -> typing.Optional[int]:
         platform_dirs = list(Path(p) for p in glob.glob(f"{android_home}/platforms/android-*"))
         platform_dirs = list(Path(p) for p in glob.glob(f"{android_home}/platforms/android-*"))
         re_platform = re.compile("android-([0-9]+)")
         re_platform = re.compile("android-([0-9]+)")
         platform_versions = []
         platform_versions = []
@@ -555,15 +831,15 @@ class Releaser:
                 platform_versions.append(int(m.group(1)))
                 platform_versions.append(int(m.group(1)))
         platform_versions.sort()
         platform_versions.sort()
         logger.info("Available platform versions: %s", platform_versions)
         logger.info("Available platform versions: %s", platform_versions)
-        platform_versions = list(filter(lambda v: v >= ANDROID_MINIMUM_API, platform_versions))
-        logger.info("Valid platform versions (>=%d): %s", ANDROID_MINIMUM_API, platform_versions)
+        platform_versions = list(filter(lambda v: v >= self._android_api_minimum, platform_versions))
+        logger.info("Valid platform versions (>=%d): %s", self._android_api_minimum, platform_versions)
         if not platform_versions:
         if not platform_versions:
             return None
             return None
         android_api = platform_versions[0]
         android_api = platform_versions[0]
         logger.info("Selected API version %d", android_api)
         logger.info("Selected API version %d", android_api)
         return android_api
         return android_api
 
 
-    def get_prefab_json_text(self) -> str:
+    def _get_prefab_json_text(self) -> str:
         return textwrap.dedent(f"""\
         return textwrap.dedent(f"""\
             {{
             {{
                 "schema_version": 2,
                 "schema_version": 2,
@@ -573,195 +849,445 @@ class Releaser:
             }}
             }}
         """)
         """)
 
 
-    def get_prefab_module_json_text(self, library_name: str, extra_libs: list[str]) -> str:
-        export_libraries_str = ", ".join(f"\"-l{lib}\"" for lib in extra_libs)
-        return textwrap.dedent(f"""\
-            {{
-                "export_libraries": [{export_libraries_str}],
-                "library_name": "lib{library_name}"
-            }}
-        """)
+    def _get_prefab_module_json_text(self, library_name: typing.Optional[str], export_libraries: list[str]) -> str:
+        for lib in export_libraries:
+            assert isinstance(lib, str), f"{lib} must be a string"
+        module_json_dict = {
+            "export_libraries": export_libraries,
+        }
+        if library_name:
+            module_json_dict["library_name"] = f"lib{library_name}"
+        return json.dumps(module_json_dict, indent=4)
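For example, with library_name="SDL3" and export_libraries=[] the helper returns JSON equivalent to:

    {
        "export_libraries": [],
        "library_name": "libSDL3"
    }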
 
 
-    def get_prefab_abi_json_text(self, abi: str, cpp: bool, shared: bool) -> str:
-        return textwrap.dedent(f"""\
-            {{
-              "abi": "{abi}",
-              "api": {ANDROID_MINIMUM_API},
-              "ndk": {ANDROID_MINIMUM_NDK},
-              "stl": "{'c++_shared' if cpp else 'none'}",
-              "static": {'true' if not shared else 'false'}
-            }}
-        """)
+    @property
+    def _android_api_minimum(self):
+        return self.release_info["android"]["api-minimum"]
 
 
-    def get_android_manifest_text(self) -> str:
+    @property
+    def _android_api_target(self):
+        return self.release_info["android"]["api-target"]
+
+    @property
+    def _android_ndk_minimum(self):
+        return self.release_info["android"]["ndk-minimum"]
+
+    def _get_prefab_abi_json_text(self, abi: str, cpp: bool, shared: bool) -> str:
+        abi_json_dict = {
+            "abi": abi,
+            "api": self._android_api_minimum,
+            "ndk": self._android_ndk_minimum,
+            "stl": "c++_shared" if cpp else "none",
+            "static": not shared,
+        }
+        return json.dumps(abi_json_dict, indent=4)
+
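Likewise, _get_prefab_abi_json_text renders per-ABI metadata such as the following; "api" and "ndk" come from release-info.json, so the numbers here are placeholders:

    {
        "abi": "arm64-v8a",
        "api": 21,
        "ndk": 23,
        "stl": "none",
        "static": false
    }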
+    def _get_android_manifest_text(self) -> str:
         return textwrap.dedent(f"""\
         return textwrap.dedent(f"""\
             <manifest
             <manifest
                 xmlns:android="http://schemas.android.com/apk/res/android"
                 xmlns:android="http://schemas.android.com/apk/res/android"
                 package="org.libsdl.android.{self.project}" android:versionCode="1"
                 package="org.libsdl.android.{self.project}" android:versionCode="1"
                 android:versionName="1.0">
                 android:versionName="1.0">
-                <uses-sdk android:minSdkVersion="{ANDROID_MINIMUM_API}"
-                          android:targetSdkVersion="{ANDROID_TARGET_API}" />
+                <uses-sdk android:minSdkVersion="{self._android_api_minimum}"
+                          android:targetSdkVersion="{self._android_api_target}" />
             </manifest>
             </manifest>
         """)
         """)
 
 
-    def create_android_archives(self, android_api: int, android_home: Path, android_ndk_home: Path, android_abis: list[str]) -> None:
+    def create_android_archives(self, android_api: int, android_home: Path, android_ndk_home: Path) -> None:
         cmake_toolchain_file = Path(android_ndk_home) / "build/cmake/android.toolchain.cmake"
         cmake_toolchain_file = Path(android_ndk_home) / "build/cmake/android.toolchain.cmake"
         if not cmake_toolchain_file.exists():
         if not cmake_toolchain_file.exists():
             logger.error("CMake toolchain file does not exist (%s)", cmake_toolchain_file)
             logger.error("CMake toolchain file does not exist (%s)", cmake_toolchain_file)
             raise SystemExit(1)
             raise SystemExit(1)
         aar_path =  self.dist_path / f"{self.project}-{self.version}.aar"
         aar_path =  self.dist_path / f"{self.project}-{self.version}.aar"
-        added_global_files = False
-        with zipfile.ZipFile(aar_path, "w", compression=zipfile.ZIP_DEFLATED) as zip_object:
-            def configure_file(path: Path) -> str:
-                text = path.read_text()
-                text = text.replace("@PROJECT_VERSION@", self.version)
-                text = text.replace("@PROJECT_NAME@", self.project)
-                return text
-
-            install_txt = configure_file(self.root / "build-scripts/pkg-support/android/INSTALL.md.in")
-            zip_object.writestr("INSTALL.md", install_txt)
-
-            project_description = {
-                "name": self.project,
-                "version": self.version,
-                "git-hash": self.commit,
-            }
-            zip_object.writestr("description.json", json.dumps(project_description, indent=0))
-            main_py = configure_file(self.root / "build-scripts/pkg-support/android/__main__.py.in")
-            zip_object.writestr("__main__.py", main_py)
-
-            zip_object.writestr("AndroidManifest.xml", self.get_android_manifest_text())
-            zip_object.write(self.root / "android-project/app/proguard-rules.pro", arcname="proguard.txt")
-            zip_object.write(self.root / "LICENSE.txt", arcname="META-INF/LICENSE.txt")
-            zip_object.write(self.root / "cmake/sdlcpu.cmake", arcname="cmake/sdlcpu.cmake")
-            zip_object.write(self.root / "build-scripts/pkg-support/android/cmake/SDL3Config.cmake", arcname="cmake/SDL3Config.cmake")
-            zip_object.write(self.root / "build-scripts/pkg-support/android/cmake/SDL3ConfigVersion.cmake", arcname="cmake/SDL3ConfigVersion.cmake")
-            zip_object.writestr("prefab/prefab.json", self.get_prefab_json_text())
-            self._zip_add_git_hash(zip_file=zip_object)
-
-            for android_abi in android_abis:
-                with self.section_printer.group(f"Building for Android {android_api} {android_abi}"):
-                    build_dir = self.root / "build-android" / f"{android_abi}-build"
-                    install_dir = self.root / "install-android" / f"{android_abi}-install"
-                    shutil.rmtree(install_dir, ignore_errors=True)
-                    assert not install_dir.is_dir(), f"{install_dir} should not exist prior to build"
-                    cmake_args = [
-                        "cmake",
-                        "-S", str(self.root),
-                        "-B", str(build_dir),
-                        "--fresh",
-                        f'''-DCMAKE_C_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''',
-                        f'''-DCMAKE_CXX_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''',
-                        "-DCMAKE_BUILD_TYPE=RelWithDebInfo",
-                        f"-DCMAKE_TOOLCHAIN_FILE={cmake_toolchain_file}",
-                        f"-DANDROID_PLATFORM={android_api}",
-                        f"-DANDROID_ABI={android_abi}",
-                        "-DCMAKE_POSITION_INDEPENDENT_CODE=ON",
-                        "-DSDL_SHARED=ON",
-                        "-DSDL_STATIC=OFF",
-                        "-DSDL_TEST_LIBRARY=ON",
-                        "-DSDL_DISABLE_ANDROID_JAR=OFF",
-                        "-DSDL_TESTS=OFF",
-                        f"-DCMAKE_INSTALL_PREFIX={install_dir}",
-                        "-DSDL_DISABLE_INSTALL=OFF",
-                        "-DSDL_DISABLE_INSTALL_DOCS=OFF",
-                        "-DCMAKE_INSTALL_INCLUDEDIR=include ",
-                        "-DCMAKE_INSTALL_LIBDIR=lib",
-                        "-DCMAKE_INSTALL_DATAROOTDIR=share",
-                        "-DCMAKE_BUILD_TYPE=Release",
-                        f"-DSDL_ANDROID_HOME={android_home}",
-                        f"-G{self.cmake_generator}",
-                    ]
-                    build_args = [
-                        "cmake",
-                        "--build", str(build_dir),
-                        "--config", "RelWithDebInfo",
-                    ]
-                    install_args = [
-                        "cmake",
-                        "--install", str(build_dir),
-                        "--config", "RelWithDebInfo",
-                    ]
-                    self.executer.run(cmake_args)
-                    self.executer.run(build_args)
-                    self.executer.run(install_args)
-
-                    main_so_library = install_dir / "lib" / f"lib{self.project}.so"
-                    logger.debug("Expecting library %s", main_so_library)
-                    assert main_so_library.is_file(), "CMake should have built a shared library (e.g. libSDL3.so)"
-
-                    test_library = install_dir / "lib" / f"lib{self.project}_test.a"
-                    logger.debug("Expecting library %s", test_library)
-                    assert test_library.is_file(), "CMake should have built a static test library (e.g. libSDL3_test.a)"
-
-                    java_jar = install_dir / f"share/java/{self.project}/{self.project}-{self.version}.jar"
-                    logger.debug("Expecting java archive: %s", java_jar)
-                    assert java_jar.is_file(), "CMake should have compiled the java sources and archived them into a JAR"
-
-                    javasources_jar = install_dir / f"share/java/{self.project}/{self.project}-{self.version}-sources.jar"
-                    logger.debug("Expecting java sources archive %s", javasources_jar)
-                    assert javasources_jar.is_file(), "CMake should have archived the java sources into a JAR"
-
-                    javadoc_dir = install_dir / "share/javadoc" / self.project
-                    logger.debug("Expecting javadoc archive %s", javadoc_dir)
-                    assert javadoc_dir.is_dir(), "CMake should have built javadoc documentation for the java sources"
-                    if not added_global_files:
-                        zip_object.write(java_jar, arcname="classes.jar")
-                        zip_object.write(javasources_jar, arcname="classes-sources.jar", )
-                        doc_jar_path = install_dir / "classes-doc.jar"
-
-                        javadoc_jar_args = ["jar", "--create", "--file", str(doc_jar_path)]
-                        for fn in javadoc_dir.iterdir():
-                            javadoc_jar_args.extend(["-C", str(javadoc_dir), fn.name])
-                        self.executer.run(javadoc_jar_args)
-                        zip_object.write(doc_jar_path, arcname="classes-doc.jar")
-
-                        for header in (install_dir / "include" / self.project).iterdir():
-                            zip_object.write(header, arcname=f"prefab/modules/{self.project}/include/{self.project}/{header.name}")
-
-                        zip_object.writestr(f"prefab/modules/{self.project}/module.json", self.get_prefab_module_json_text(library_name=self.project, extra_libs=[]))
-                        zip_object.writestr(f"prefab/modules/{self.project}_test/module.json", self.get_prefab_module_json_text(library_name=f"{self.project}_test", extra_libs=list()))
-                        added_global_files = True
-
-                    zip_object.write(main_so_library, arcname=f"prefab/modules/{self.project}/libs/android.{android_abi}/lib{self.project}.so")
-                    zip_object.writestr(f"prefab/modules/{self.project}/libs/android.{android_abi}/abi.json", self.get_prefab_abi_json_text(abi=android_abi, cpp=False, shared=True))
-                    zip_object.write(test_library, arcname=f"prefab/modules/{self.project}_test/libs/android.{android_abi}/lib{self.project}_test.a")
-                    zip_object.writestr(f"prefab/modules/{self.project}_test/libs/android.{android_abi}/abi.json", self.get_prefab_abi_json_text(abi=android_abi, cpp=False, shared=False))
-
+        android_abis = self.release_info["android"]["abis"]
+        java_jars_added = False
+        module_data_added = False
+        android_deps_path = self.deps_path / "android-deps"
+        shutil.rmtree(android_deps_path, ignore_errors=True)
+
+        for dep, depinfo in self.release_info["android"].get("dependencies", {}).items():
+            android_aar = self.deps_path / glob.glob(depinfo["artifact"], root_dir=self.deps_path)[0]
+            with self.section_printer.group(f"Extracting Android dependency {dep} ({android_aar.name})"):
+                self.executer.run([sys.executable, str(android_aar), "-o", str(android_deps_path)])
+
+        for module_name, module_info in self.release_info["android"]["modules"].items():
+            assert "type" in module_info and module_info["type"] in ("interface", "library"), f"module {module_name} must have a valid type"
+
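A hypothetical "modules" entry that satisfies the validation above (the key names are the ones read later in this method; module names and paths are made up):

    # Illustrative sketch, not taken from release-info.json.
    android_modules_info = {
        "SDL3": {
            "type": "library",
            "library": "lib/libSDL3.so",                 # relative to the per-ABI install dir, .so or .a
            "export-libraries": [],
            "includes": {"SDL3": ["include/SDL3/*.h"]},  # optional; archive dir -> globs
        },
        "Headers": {
            "type": "interface",                         # metadata-only module, no library
        },
    }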
+        archive_file_tree = ArchiveFileTree()
+
+        for android_abi in android_abis:
+            with self.section_printer.group(f"Building for Android {android_api} {android_abi}"):
+                build_dir = self.root / "build-android" / f"{android_abi}-build"
+                install_dir = self.root / "install-android" / f"{android_abi}-install"
+                shutil.rmtree(install_dir, ignore_errors=True)
+                assert not install_dir.is_dir(), f"{install_dir} should not exist prior to build"
+                build_type = "Release"
+                cmake_args = [
+                    "cmake",
+                    "-S", str(self.root),
+                    "-B", str(build_dir),
+                    f'''-DCMAKE_C_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''',
+                    f'''-DCMAKE_CXX_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''',
+                    f"-DCMAKE_TOOLCHAIN_FILE={cmake_toolchain_file}",
+                    f"-DCMAKE_PREFIX_PATH={str(android_deps_path)}",
+                    f"-DCMAKE_FIND_ROOT_PATH_MODE_PACKAGE=BOTH",
+                    f"-DANDROID_HOME={android_home}",
+                    f"-DANDROID_PLATFORM={android_api}",
+                    f"-DANDROID_ABI={android_abi}",
+                    "-DCMAKE_POSITION_INDEPENDENT_CODE=ON",
+                    f"-DCMAKE_INSTALL_PREFIX={install_dir}",
+                    "-DCMAKE_INSTALL_INCLUDEDIR=include",
+                    "-DCMAKE_INSTALL_LIBDIR=lib",
+                    "-DCMAKE_INSTALL_DATAROOTDIR=share",
+                    f"-DCMAKE_BUILD_TYPE={build_type}",
+                    f"-G{self.cmake_generator}",
+                ] + self.release_info["android"]["cmake"]["args"] + ([] if self.fast else ["--fresh"])
+                build_args = [
+                    "cmake",
+                    "--build", str(build_dir),
+                    "--verbose",
+                    "--config", build_type,
+                ]
+                install_args = [
+                    "cmake",
+                    "--install", str(build_dir),
+                    "--config", build_type,
+                ]
+                self.executer.run(cmake_args)
+                self.executer.run(build_args)
+                self.executer.run(install_args)
+
+                for module_name, module_info in self.release_info["android"]["modules"].items():
+                    arcdir_prefab_module = f"prefab/modules/{module_name}"
+                    if module_info["type"] == "library":
+                        library = install_dir / module_info["library"]
+                        assert library.suffix in (".so", ".a")
+                        assert library.is_file(), f"CMake should have built library '{library}' for module {module_name}"
+                        arcdir_prefab_libs = f"{arcdir_prefab_module}/libs/android.{android_abi}"
+                        archive_file_tree.add_file(NodeInArchive.from_fs(arcpath=f"{arcdir_prefab_libs}/{library.name}", path=library, time=self.arc_time))
+                        archive_file_tree.add_file(NodeInArchive.from_text(arcpath=f"{arcdir_prefab_libs}/abi.json", text=self._get_prefab_abi_json_text(abi=android_abi, cpp=False, shared=library.suffix == ".so"), time=self.arc_time))
+
+                    if not module_data_added:
+                        library_name = None
+                        if module_info["type"] == "library":
+                            library_name = Path(module_info["library"]).stem.removeprefix("lib")
+                        export_libraries = module_info.get("export-libraries", [])
+                        archive_file_tree.add_file(NodeInArchive.from_text(arcpath=arc_join(arcdir_prefab_module, "module.json"), text=self._get_prefab_module_json_text(library_name=library_name, export_libraries=export_libraries), time=self.arc_time))
+                        arcdir_prefab_include = f"prefab/modules/{module_name}/include"
+                        if "includes" in module_info:
+                            archive_file_tree.add_file_mapping(arc_dir=arcdir_prefab_include, file_mapping=module_info["includes"], file_mapping_root=install_dir, context=self.get_context(), time=self.arc_time)
+                        else:
+                            archive_file_tree.add_file(NodeInArchive.from_text(arcpath=arc_join(arcdir_prefab_include, ".keep"), text="\n", time=self.arc_time))
+                module_data_added = True
+
+                if not java_jars_added:
+                    java_jars_added = True
+                    if "jars" in self.release_info["android"]:
+                        classes_jar_path = install_dir / configure_text(text=self.release_info["android"]["jars"]["classes"], context=self.get_context())
+                        sources_jar_path = install_dir / configure_text(text=self.release_info["android"]["jars"]["sources"], context=self.get_context())
+                        doc_jar_path = install_dir / configure_text(text=self.release_info["android"]["jars"]["doc"], context=self.get_context())
+                        assert classes_jar_path.is_file(), f"CMake should have compiled the java sources and archived them into a JAR ({classes_jar_path})"
+                        assert sources_jar_path.is_file(), f"CMake should have archived the java sources into a JAR ({sources_jar_path})"
+                        assert doc_jar_path.is_file(), f"CMake should have archived javadoc into a JAR ({doc_jar_path})"
+
+                        archive_file_tree.add_file(NodeInArchive.from_fs(arcpath="classes.jar", path=classes_jar_path, time=self.arc_time))
+                        archive_file_tree.add_file(NodeInArchive.from_fs(arcpath="classes-sources.jar", path=sources_jar_path, time=self.arc_time))
+                        archive_file_tree.add_file(NodeInArchive.from_fs(arcpath="classes-doc.jar", path=doc_jar_path, time=self.arc_time))
+
+        assert ("jars" in self.release_info["android"] and java_jars_added) or "jars" not in self.release_info["android"], "Must have archived java JAR archives"
+
+        archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["android"].get("files", {}), file_mapping_root=self.root, context=self.get_context(), time=self.arc_time)
+
+        archive_file_tree.add_file(NodeInArchive.from_text(arcpath="prefab/prefab.json", text=self._get_prefab_json_text(), time=self.arc_time))
+        archive_file_tree.add_file(NodeInArchive.from_text(arcpath="AndroidManifest.xml", text=self._get_android_manifest_text(), time=self.arc_time))
+
+        with Archiver(zip_path=aar_path) as archiver:
+            archive_file_tree.add_to_archiver(archive_base="", archiver=archiver)
+            archiver.add_git_hash(arcdir="", commit=self.commit, time=self.arc_time)
         self.artifacts[f"android-aar"] = aar_path
         self.artifacts[f"android-aar"] = aar_path
 
 
+    def download_dependencies(self):
+        shutil.rmtree(self.deps_path, ignore_errors=True)
+        self.deps_path.mkdir(parents=True)
+
+        if self.github:
+            with open(os.environ["GITHUB_OUTPUT"], "a") as f:
+                f.write(f"dep-path={self.deps_path.absolute()}\n")
+
+        for dep, depinfo in self.release_info.get("dependencies", {}).items():
+            startswith = depinfo["startswith"]
+            dep_repo = depinfo["repo"]
+            # FIXME: dropped "--exclude-pre-releases"
+            dep_string_data = self.executer.check_output(["gh", "-R", dep_repo, "release", "list", "--exclude-drafts", "--json", "name,createdAt,tagName", "--jq", f'[.[]|select(.name|startswith("{startswith}"))]|max_by(.createdAt)']).strip()
+            dep_data = json.loads(dep_string_data)
+            dep_tag = dep_data["tagName"]
+            dep_version = dep_data["name"]
+            logger.info("Download dependency %s version %s (tag=%s) ", dep, dep_version, dep_tag)
+            self.executer.run(["gh", "-R", dep_repo, "release", "download", dep_tag], cwd=self.deps_path)
+            if self.github:
+                with open(os.environ["GITHUB_OUTPUT"], "a") as f:
+                    f.write(f"dep-{dep.lower()}-version={dep_version}\n")
+
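The --jq expression above keeps only releases whose name starts with the configured prefix and then picks the newest by createdAt. In Python terms the selection is roughly the following; the release data is made up:

    releases = [  # hypothetical `gh release list --json name,createdAt,tagName` output
        {"name": "3.2.0", "createdAt": "2025-01-01T00:00:00Z", "tagName": "release-3.2.0"},
        {"name": "3.1.0", "createdAt": "2024-06-01T00:00:00Z", "tagName": "release-3.1.0"},
    ]
    startswith = "3."
    dep_data = max(
        (r for r in releases if r["name"].startswith(startswith)),
        key=lambda r: r["createdAt"],  # ISO-8601 strings in the same timezone sort chronologically
    )
    assert dep_data["tagName"] == "release-3.2.0"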
+    def verify_dependencies(self):
+        for dep, depinfo in self.release_info.get("dependencies", {}).items():
+            if "mingw" in self.release_info:
+                mingw_matches = glob.glob(self.release_info["mingw"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)
+                assert len(mingw_matches) == 1, f"Exactly one archive matches mingw {dep} dependency: {mingw_matches}"
+            if "dmg" in self.release_info:
+                dmg_matches = glob.glob(self.release_info["dmg"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)
+                assert len(dmg_matches) == 1, f"Exactly one archive matches dmg {dep} dependency: {dmg_matches}"
+            if "msvc" in self.release_info:
+                msvc_matches = glob.glob(self.release_info["msvc"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)
+                assert len(msvc_matches) == 1, f"Exactly one archive matches msvc {dep} dependency: {msvc_matches}"
+            if "android" in self.release_info:
+                android_matches = glob.glob(self.release_info["android"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)
+                assert len(android_matches) == 1, f"Exactly one archive matches android {dep} dependency: {android_matches}"
+
+    @staticmethod
+    def _arch_to_vs_platform(arch: str, configuration: str="Release") -> VsArchPlatformConfig:
+        ARCH_TO_VS_PLATFORM = {
+            "x86": VsArchPlatformConfig(arch="x86", platform="Win32", configuration=configuration),
+            "x64": VsArchPlatformConfig(arch="x64", platform="x64", configuration=configuration),
+            "arm64": VsArchPlatformConfig(arch="arm64", platform="ARM64", configuration=configuration),
+        }
+        return ARCH_TO_VS_PLATFORM[arch]
+
+    def build_msvc(self):
+        with self.section_printer.group("Find Visual Studio"):
+            vs = VisualStudio(executer=self.executer)
+        for arch in self.release_info["msvc"].get("msbuild", {}).get("archs", []):
+            self._build_msvc_msbuild(arch_platform=self._arch_to_vs_platform(arch=arch), vs=vs)
+        if "cmake" in self.release_info["msvc"]:
+            deps_path = self.root / "msvc-deps"
+            shutil.rmtree(deps_path, ignore_errors=True)
+            dep_roots = []
+            for dep, depinfo in self.release_info["msvc"].get("dependencies", {}).items():
+                dep_extract_path = deps_path / f"extract-{dep}"
+                msvc_zip = self.deps_path / glob.glob(depinfo["artifact"], root_dir=self.deps_path)[0]
+                with zipfile.ZipFile(msvc_zip, "r") as zf:
+                    zf.extractall(dep_extract_path)
+                contents_msvc_zip = glob.glob(str(dep_extract_path / "*"))
+                assert len(contents_msvc_zip) == 1, f"There must be exactly one root item in the root directory of {dep}"
+                dep_roots.append(contents_msvc_zip[0])
+
+            for arch in self.release_info["msvc"].get("cmake", {}).get("archs", []):
+                self._build_msvc_cmake(arch_platform=self._arch_to_vs_platform(arch=arch), dep_roots=dep_roots)
+        with self.section_printer.group("Create SDL VC development zip"):
+            self._build_msvc_devel()
+
+    def _build_msvc_msbuild(self, arch_platform: VsArchPlatformConfig, vs: VisualStudio):
+        platform_context = self.get_context(arch_platform.extra_context())
+        for dep, depinfo in self.release_info["msvc"].get("dependencies", {}).items():
+            msvc_zip = self.deps_path / glob.glob(depinfo["artifact"], root_dir=self.deps_path)[0]
+
+            src_globs = [configure_text(instr["src"], context=platform_context) for instr in depinfo["copy"]]
+            with zipfile.ZipFile(msvc_zip, "r") as zf:
+                for member in zf.namelist():
+                    member_path = "/".join(Path(member).parts[1:])
+                    for src_i, src_glob in enumerate(src_globs):
+                        if fnmatch.fnmatch(member_path, src_glob):
+                            dst = (self.root / configure_text(depinfo["copy"][src_i]["dst"], context=platform_context)).resolve() / Path(member_path).name
+                            zip_data = zf.read(member)
+                            if dst.exists():
+                                identical = False
+                                if dst.is_file():
+                                    orig_bytes = dst.read_bytes()
+                                    if orig_bytes == zip_data:
+                                        identical = True
+                                if not identical:
+                                    logger.warning("Extracting dependency %s will cause %s to be overwritten", dep, dst)
+                                    if not self.overwrite:
+                                        raise RuntimeError("Run with --overwrite to allow overwriting")
+                            logger.debug("Extracting %s -> %s", member, dst)
+
+                            dst.parent.mkdir(exist_ok=True, parents=True)
+                            dst.write_bytes(zip_data)
+
+        prebuilt_paths = set(self.root / full_prebuilt_path for prebuilt_path in self.release_info["msvc"]["msbuild"].get("prebuilt", []) for full_prebuilt_path in glob.glob(configure_text(prebuilt_path, context=platform_context), root_dir=self.root))
+        msbuild_paths = set(self.root / configure_text(f, context=platform_context) for file_mapping in (self.release_info["msvc"]["msbuild"]["files-lib"], self.release_info["msvc"]["msbuild"]["files-devel"]) for files_list in file_mapping.values() for f in files_list)
+        assert prebuilt_paths.issubset(msbuild_paths), "msvc.msbuild.prebuilt must be a subset of (msvc.msbuild.files-lib, msvc.msbuild.files-devel)"
+        built_paths = msbuild_paths.difference(prebuilt_paths)
+        logger.info("MSBuild builds these files, to be included in the package: %s", built_paths)
+        if not self.fast:
+            for b in built_paths:
+                b.unlink(missing_ok=True)
+
+        rel_projects: list[str] = self.release_info["msvc"]["msbuild"]["projects"]
+        projects = list(self.root / p for p in rel_projects)
+
+        directory_build_props_src_relpath = self.release_info["msvc"]["msbuild"].get("directory-build-props")
+        for project in projects:
+            dir_b_props = project.parent / "Directory.Build.props"
+            dir_b_props.unlink(missing_ok = True)
+            if directory_build_props_src_relpath:
+                src = self.root / directory_build_props_src_relpath
+                logger.debug("Copying %s -> %s", src, dir_b_props)
+                shutil.copy(src=src, dst=dir_b_props)
+
+        with self.section_printer.group(f"Build {arch_platform.arch} VS binary"):
+            vs.build(arch_platform=arch_platform, projects=projects)
+
+        if self.dry:
+            for b in built_paths:
+                b.parent.mkdir(parents=True, exist_ok=True)
+                b.touch()
+
+        for b in built_paths:
+            assert b.is_file(), f"{b} has not been created"
+            b.parent.mkdir(parents=True, exist_ok=True)
+            b.touch()
+
+        zip_path = self.dist_path / f"{self.project}-{self.version}-win32-{arch_platform.arch}.zip"
+        zip_path.unlink(missing_ok=True)
+
+        logger.info("Collecting files...")
+        archive_file_tree = ArchiveFileTree()
+        archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["msbuild"]["files-lib"], file_mapping_root=self.root, context=platform_context, time=self.arc_time)
+        archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["files-lib"], file_mapping_root=self.root, context=platform_context, time=self.arc_time)
+
+        logger.info("Writing to %s", zip_path)
+        with Archiver(zip_path=zip_path) as archiver:
+            arc_root = f""
+            archive_file_tree.add_to_archiver(archive_base=arc_root, archiver=archiver)
+            archiver.add_git_hash(arcdir=arc_root, commit=self.commit, time=self.arc_time)
+        self.artifacts[f"VC-{arch_platform.arch}"] = zip_path
+
+        for p in built_paths:
+            assert p.is_file(), f"{p} should exist"
+
+    def _arch_platform_to_build_path(self, arch_platform: VsArchPlatformConfig) -> Path:
+        return self.root / f"build-vs-{arch_platform.arch}"
+
+    def _arch_platform_to_install_path(self, arch_platform: VsArchPlatformConfig) -> Path:
+        return self._arch_platform_to_build_path(arch_platform) / "prefix"
+
+    def _build_msvc_cmake(self, arch_platform: VsArchPlatformConfig, dep_roots: list[Path]):
+        build_path = self._arch_platform_to_build_path(arch_platform)
+        install_path = self._arch_platform_to_install_path(arch_platform)
+        platform_context = self.get_context(extra_context=arch_platform.extra_context())
+
+        build_type = "Release"
+
+        built_paths = set(install_path / configure_text(f, context=platform_context) for file_mapping in (self.release_info["msvc"]["cmake"]["files-lib"], self.release_info["msvc"]["cmake"]["files-devel"]) for files_list in file_mapping.values() for f in files_list)
+        logger.info("CMake builds these files, to be included in the package: %s", built_paths)
+        if not self.fast:
+            for b in built_paths:
+                b.unlink(missing_ok=True)
+
+        shutil.rmtree(install_path, ignore_errors=True)
+        build_path.mkdir(parents=True, exist_ok=True)
+        with self.section_printer.group(f"Configure VC CMake project for {arch_platform.arch}"):
+            self.executer.run([
+                "cmake", "-S", str(self.root), "-B", str(build_path),
+                "-A", arch_platform.platform,
+                "-DCMAKE_INSTALL_BINDIR=bin",
+                "-DCMAKE_INSTALL_DATAROOTDIR=share",
+                "-DCMAKE_INSTALL_INCLUDEDIR=include",
+                "-DCMAKE_INSTALL_LIBDIR=lib",
+                f"-DCMAKE_BUILD_TYPE={build_type}",
+                f"-DCMAKE_INSTALL_PREFIX={install_path}",
+                # MSVC debug information format flags are selected by an abstraction
+                "-DCMAKE_POLICY_DEFAULT_CMP0141=NEW",
+                # MSVC debug information format
+                "-DCMAKE_MSVC_DEBUG_INFORMATION_FORMAT=ProgramDatabase",
+                # Linker flags for executables
+                "-DCMAKE_EXE_LINKER_FLAGS=-INCREMENTAL:NO -DEBUG -OPT:REF -OPT:ICF",
+                # Linker flag for shared libraries
+                "-DCMAKE_SHARED_LINKER_FLAGS=-INCREMENTAL:NO -DEBUG -OPT:REF -OPT:ICF",
+                # MSVC runtime library flags are selected by an abstraction
+                "-DCMAKE_POLICY_DEFAULT_CMP0091=NEW",
+                # Use statically linked runtime (-MT) (ideally, should be "MultiThreaded$<$<CONFIG:Debug>:Debug>")
+                "-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded",
+                f"-DCMAKE_PREFIX_PATH={';'.join(str(s) for s in dep_roots)}",
+            ] + self.release_info["msvc"]["cmake"]["args"] + ([] if self.fast else ["--fresh"]))
+
+        with self.section_printer.group(f"Build VC CMake project for {arch_platform.arch}"):
+            self.executer.run(["cmake", "--build", str(build_path), "--verbose", "--config", build_type])
+        with self.section_printer.group(f"Install VC CMake project for {arch_platform.arch}"):
+            self.executer.run(["cmake", "--install", str(build_path), "--config", build_type])
+
+        if self.dry:
+            for b in built_paths:
+                b.parent.mkdir(parents=True, exist_ok=True)
+                b.touch()
+
+        zip_path = self.dist_path / f"{self.project}-{self.version}-win32-{arch_platform.arch}.zip"
+        zip_path.unlink(missing_ok=True)
+
+        logger.info("Collecting files...")
+        archive_file_tree = ArchiveFileTree()
+        archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["cmake"]["files-lib"], file_mapping_root=install_path, context=platform_context, time=self.arc_time)
+        archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["files-lib"], file_mapping_root=self.root, context=self.get_context(), time=self.arc_time)
+
+        logger.info("Creating %s", zip_path)
+        with Archiver(zip_path=zip_path) as archiver:
+            arc_root = f""
+            archive_file_tree.add_to_archiver(archive_base=arc_root, archiver=archiver)
+            archiver.add_git_hash(arcdir=arc_root, commit=self.commit, time=self.arc_time)
+
+        for p in built_paths:
+            assert p.is_file(), f"{p} should exist"
+
+    def _build_msvc_devel(self) -> None:
+        zip_path = self.dist_path / f"{self.project}-devel-{self.version}-VC.zip"
+        arc_root = f"{self.project}-{self.version}"
+
+        logger.info("Collecting files...")
+        archive_file_tree = ArchiveFileTree()
+        if "msbuild" in self.release_info["msvc"]:
+            for arch in self.release_info["msvc"]["msbuild"]["archs"]:
+                arch_platform = self._arch_to_vs_platform(arch=arch)
+                platform_context = self.get_context(arch_platform.extra_context())
+                archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["msvc"]["msbuild"]["files-devel"], file_mapping_root=self.root, context=platform_context, time=self.arc_time)
+        if "cmake" in self.release_info["msvc"]:
+            for arch in self.release_info["msvc"]["cmake"]["archs"]:
+                arch_platform = self._arch_to_vs_platform(arch=arch)
+                platform_context = self.get_context(arch_platform.extra_context())
+                archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["msvc"]["cmake"]["files-devel"], file_mapping_root=self._arch_platform_to_install_path(arch_platform), context=platform_context, time=self.arc_time)
+        archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["msvc"]["files-devel"], file_mapping_root=self.root, context=self.get_context(), time=self.arc_time)
+
+        with Archiver(zip_path=zip_path) as archiver:
+            archive_file_tree.add_to_archiver(archive_base="", archiver=archiver)
+            archiver.add_git_hash(arcdir=arc_root, commit=self.commit, time=self.arc_time)
+        self.artifacts["VC-devel"] = zip_path
+
     @classmethod
     @classmethod
-    def extract_sdl_version(cls, root: Path, project: str) -> str:
-        with open(root / f"include/{project}/SDL_version.h", "r") as f:
+    def extract_sdl_version(cls, root: Path, release_info: dict) -> str:
+        with open(root / release_info["version"]["file"], "r") as f:
             text = f.read()
             text = f.read()
-        major = next(re.finditer(r"^#define SDL_MAJOR_VERSION\s+([0-9]+)$", text, flags=re.M)).group(1)
-        minor = next(re.finditer(r"^#define SDL_MINOR_VERSION\s+([0-9]+)$", text, flags=re.M)).group(1)
-        micro = next(re.finditer(r"^#define SDL_MICRO_VERSION\s+([0-9]+)$", text, flags=re.M)).group(1)
+        major = next(re.finditer(release_info["version"]["re_major"], text, flags=re.M)).group(1)
+        minor = next(re.finditer(release_info["version"]["re_minor"], text, flags=re.M)).group(1)
+        micro = next(re.finditer(release_info["version"]["re_micro"], text, flags=re.M)).group(1)
         return f"{major}.{minor}.{micro}"
         return f"{major}.{minor}.{micro}"
 
 
 
 
 def main(argv=None) -> int:
 def main(argv=None) -> int:
+    if sys.version_info < (3, 11):
+        logger.error("This script needs at least python 3.11")
+        return 1
+
     parser = argparse.ArgumentParser(allow_abbrev=False, description="Create SDL release artifacts")
     parser = argparse.ArgumentParser(allow_abbrev=False, description="Create SDL release artifacts")
-    parser.add_argument("--root", metavar="DIR", type=Path, default=Path(__file__).absolute().parents[1], help="Root of SDL")
+    parser.add_argument("--root", metavar="DIR", type=Path, default=Path(__file__).absolute().parents[1], help="Root of project")
+    parser.add_argument("--release-info", metavar="JSON", dest="path_release_info", type=Path, default=Path(__file__).absolute().parent / "release-info.json", help="Path of release-info.json")
+    parser.add_argument("--dependency-folder", metavar="FOLDER", dest="deps_path", type=Path, default="deps", help="Directory containing pre-built archives of dependencies (will be removed when downloading archives)")
     parser.add_argument("--out", "-o", metavar="DIR", dest="dist_path", type=Path, default="dist", help="Output directory")
     parser.add_argument("--out", "-o", metavar="DIR", dest="dist_path", type=Path, default="dist", help="Output directory")
     parser.add_argument("--github", action="store_true", help="Script is running on a GitHub runner")
     parser.add_argument("--github", action="store_true", help="Script is running on a GitHub runner")
     parser.add_argument("--commit", default="HEAD", help="Git commit/tag of which a release should be created")
     parser.add_argument("--commit", default="HEAD", help="Git commit/tag of which a release should be created")
-    parser.add_argument("--project", required=True, help="Name of the project (e.g. SDL3")
-    parser.add_argument("--create", choices=["source", "mingw", "win32", "xcframework", "android"], required=True, action="append", dest="actions", help="What to do")
+    parser.add_argument("--actions", choices=["download", "source", "android", "mingw", "msvc", "dmg"], required=True, nargs="+", dest="actions", help="What to do?")
     parser.set_defaults(loglevel=logging.INFO)
     parser.set_defaults(loglevel=logging.INFO)
     parser.add_argument('--vs-year', dest="vs_year", help="Visual Studio year")
     parser.add_argument('--vs-year', dest="vs_year", help="Visual Studio year")
     parser.add_argument('--android-api', type=int, dest="android_api", help="Android API version")
     parser.add_argument('--android-api', type=int, dest="android_api", help="Android API version")
     parser.add_argument('--android-home', dest="android_home", default=os.environ.get("ANDROID_HOME"), help="Android Home folder")
     parser.add_argument('--android-home', dest="android_home", default=os.environ.get("ANDROID_HOME"), help="Android Home folder")
     parser.add_argument('--android-ndk-home', dest="android_ndk_home", default=os.environ.get("ANDROID_NDK_HOME"), help="Android NDK Home folder")
     parser.add_argument('--android-ndk-home', dest="android_ndk_home", default=os.environ.get("ANDROID_NDK_HOME"), help="Android NDK Home folder")
-    parser.add_argument('--android-abis', dest="android_abis", nargs="*", choices=ANDROID_AVAILABLE_ABIS, default=list(ANDROID_AVAILABLE_ABIS), help="Android NDK Home folder")
     parser.add_argument('--cmake-generator', dest="cmake_generator", default="Ninja", help="CMake Generator")
     parser.add_argument('--cmake-generator', dest="cmake_generator", default="Ninja", help="CMake Generator")
     parser.add_argument('--debug', action='store_const', const=logging.DEBUG, dest="loglevel", help="Print script debug information")
     parser.add_argument('--debug', action='store_const', const=logging.DEBUG, dest="loglevel", help="Print script debug information")
     parser.add_argument('--dry-run', action='store_true', dest="dry", help="Don't execute anything")
     parser.add_argument('--dry-run', action='store_true', dest="dry", help="Don't execute anything")
     parser.add_argument('--force', action='store_true', dest="force", help="Ignore a non-clean git tree")
     parser.add_argument('--force', action='store_true', dest="force", help="Ignore a non-clean git tree")
+    parser.add_argument('--overwrite', action='store_true', dest="overwrite", help="Allow potentially overwriting other projects")
+    parser.add_argument('--fast', action='store_true', dest="fast", help="Don't do a rebuild")
 
 
     args = parser.parse_args(argv)
     args = parser.parse_args(argv)
     logging.basicConfig(level=args.loglevel, format='[%(levelname)s] %(message)s')
     logging.basicConfig(level=args.loglevel, format='[%(levelname)s] %(message)s')
-    args.actions = set(args.actions)
+    args.deps_path = args.deps_path.absolute()
     args.dist_path = args.dist_path.absolute()
     args.dist_path = args.dist_path.absolute()
     args.root = args.root.absolute()
     args.root = args.root.absolute()
     args.dist_path = args.dist_path.absolute()
     args.dist_path = args.dist_path.absolute()
@@ -773,6 +1299,9 @@ def main(argv=None) -> int:
     else:
     else:
         section_printer = SectionPrinter()
         section_printer = SectionPrinter()
 
 
+    if args.github and "GITHUB_OUTPUT" not in os.environ:
+        os.environ["GITHUB_OUTPUT"] = "/tmp/github_output.txt"
+
     executer = Executer(root=args.root, dry=args.dry)
     executer = Executer(root=args.root, dry=args.dry)
 
 
     root_git_hash_path = args.root / GIT_HASH_FILENAME
     root_git_hash_path = args.root / GIT_HASH_FILENAME
@@ -783,69 +1312,84 @@ def main(argv=None) -> int:
         if args.commit != archive_commit:
         if args.commit != archive_commit:
             logger.warning("Commit argument is %s, but archive commit is %s. Using %s.", args.commit, archive_commit, archive_commit)
             logger.warning("Commit argument is %s, but archive commit is %s. Using %s.", args.commit, archive_commit, archive_commit)
         args.commit = archive_commit
         args.commit = archive_commit
+        revision = (args.root / REVISION_TXT).read_text().strip()
     else:
     else:
-        args.commit = executer.run(["git", "rev-parse", args.commit], stdout=True, dry_out="e5812a9fd2cda317b503325a702ba3c1c37861d9").stdout.strip()
+        args.commit = executer.check_output(["git", "rev-parse", args.commit], dry_out="e5812a9fd2cda317b503325a702ba3c1c37861d9").strip()
+        revision = executer.check_output(["git", "describe", "--always", "--tags", "--long", args.commit], dry_out="preview-3.1.3-96-g9512f2144").strip()
         logger.info("Using commit %s", args.commit)
         logger.info("Using commit %s", args.commit)
 
 
+    try:
+        with args.path_release_info.open() as f:
+            release_info = json.load(f)
+    except FileNotFoundError:
+        logger.error(f"Could not find {args.path_release_info}")
+
     releaser = Releaser(
     releaser = Releaser(
-        project=args.project,
+        release_info=release_info,
         commit=args.commit,
         commit=args.commit,
+        revision=revision,
         root=args.root,
         root=args.root,
         dist_path=args.dist_path,
         dist_path=args.dist_path,
         executer=executer,
         executer=executer,
         section_printer=section_printer,
         section_printer=section_printer,
         cmake_generator=args.cmake_generator,
         cmake_generator=args.cmake_generator,
+        deps_path=args.deps_path,
+        overwrite=args.overwrite,
+        github=args.github,
+        fast=args.fast,
     )
     )
 
 
     if root_is_maybe_archive:
     if root_is_maybe_archive:
         logger.warning("Building from archive. Skipping clean git tree check.")
         logger.warning("Building from archive. Skipping clean git tree check.")
     else:
     else:
-        porcelain_status = executer.run(["git", "status", "--ignored", "--porcelain"], stdout=True, dry_out="\n").stdout.strip()
+        porcelain_status = executer.check_output(["git", "status", "--ignored", "--porcelain"], dry_out="\n").strip()
         if porcelain_status:
         if porcelain_status:
             print(porcelain_status)
             print(porcelain_status)
             logger.warning("The tree is dirty! Do not publish any generated artifacts!")
             logger.warning("The tree is dirty! Do not publish any generated artifacts!")
             if not args.force:
             if not args.force:
                 raise Exception("The git repo contains modified and/or non-committed files. Run with --force to ignore.")
                 raise Exception("The git repo contains modified and/or non-committed files. Run with --force to ignore.")
 
 
+    if args.fast:
+        logger.warning("Doing fast build! Do not publish generated artifacts!")
+
     with section_printer.group("Arguments"):
     with section_printer.group("Arguments"):
-        print(f"project          = {args.project}")
+        print(f"project          = {releaser.project}")
         print(f"version          = {releaser.version}")
         print(f"version          = {releaser.version}")
+        print(f"revision         = {revision}")
         print(f"commit           = {args.commit}")
         print(f"commit           = {args.commit}")
         print(f"out              = {args.dist_path}")
         print(f"out              = {args.dist_path}")
         print(f"actions          = {args.actions}")
         print(f"actions          = {args.actions}")
         print(f"dry              = {args.dry}")
         print(f"dry              = {args.dry}")
         print(f"force            = {args.force}")
         print(f"force            = {args.force}")
+        print(f"overwrite        = {args.overwrite}")
         print(f"cmake_generator  = {args.cmake_generator}")
         print(f"cmake_generator  = {args.cmake_generator}")
 
 
     releaser.prepare()
     releaser.prepare()
 
 
+    if "download" in args.actions:
+        releaser.download_dependencies()
+
+    if set(args.actions).intersection({"msvc", "mingw", "android"}):
+        print("Verifying presence of dependencies (run 'download' action to download) ...")
+        releaser.verify_dependencies()
+        print("... done")
+
     if "source" in args.actions:
     if "source" in args.actions:
         if root_is_maybe_archive:
         if root_is_maybe_archive:
             raise Exception("Cannot build source archive from source archive")
             raise Exception("Cannot build source archive from source archive")
         with section_printer.group("Create source archives"):
         with section_printer.group("Create source archives"):
             releaser.create_source_archives()
             releaser.create_source_archives()
 
 
-    if "xcframework" in args.actions:
+    if "dmg" in args.actions:
         if platform.system() != "Darwin" and not args.dry:
         if platform.system() != "Darwin" and not args.dry:
-            parser.error("xcframework artifact(s) can only be built on Darwin")
+            parser.error("framework artifact(s) can only be built on Darwin")
 
 
-        releaser.create_xcframework()
+        releaser.create_dmg()
 
 
-    if "win32" in args.actions:
+    if "msvc" in args.actions:
         if platform.system() != "Windows" and not args.dry:
         if platform.system() != "Windows" and not args.dry:
-            parser.error("win32 artifact(s) can only be built on Windows")
-        with section_printer.group("Find Visual Studio"):
-            vs = VisualStudio(executer=executer)
-        arm64 = releaser.build_vs_cmake(arch="arm64", arch_cmake="ARM64")
-        x86 = releaser.build_vs(arch="x86", platform="Win32", vs=vs)
-        x64 = releaser.build_vs(arch="x64", platform="x64", vs=vs)
-        with section_printer.group("Create SDL VC development zip"):
-            arch_vc = {
-                "x86": x86,
-                "x64": x64,
-                "arm64": arm64,
-            }
-            releaser.build_vs_devel(arch_vc)
+            parser.error("msvc artifact(s) can only be built on Windows")
+        releaser.build_msvc()
 
 
     if "mingw" in args.actions:
     if "mingw" in args.actions:
         releaser.create_mingw_archives()
         releaser.create_mingw_archives()
@@ -857,30 +1401,22 @@ def main(argv=None) -> int:
             parser.error("Invalid $ANDROID_NDK_HOME or --android_ndk_home: must be a directory containing the Android NDK")
             parser.error("Invalid $ANDROID_NDK_HOME or --android_ndk_home: must be a directory containing the Android NDK")
         if args.android_api is None:
         if args.android_api is None:
             with section_printer.group("Detect Android APIS"):
             with section_printer.group("Detect Android APIS"):
-                args.android_api = releaser.detect_android_api(android_home=args.android_home)
+                args.android_api = releaser._detect_android_api(android_home=args.android_home)
         if args.android_api is None or not (Path(args.android_home) / f"platforms/android-{args.android_api}").is_dir():
         if args.android_api is None or not (Path(args.android_home) / f"platforms/android-{args.android_api}").is_dir():
             parser.error("Invalid --android-api, and/or could not be detected")
             parser.error("Invalid --android-api, and/or could not be detected")
-        if not args.android_abis:
-            parser.error("Need at least one Android ABI")
         with section_printer.group("Android arguments"):
         with section_printer.group("Android arguments"):
             print(f"android_home     = {args.android_home}")
             print(f"android_home     = {args.android_home}")
             print(f"android_ndk_home = {args.android_ndk_home}")
             print(f"android_ndk_home = {args.android_ndk_home}")
             print(f"android_api      = {args.android_api}")
             print(f"android_api      = {args.android_api}")
-            print(f"android_abis     = {args.android_abis}")
         releaser.create_android_archives(
         releaser.create_android_archives(
             android_api=args.android_api,
             android_api=args.android_api,
             android_home=args.android_home,
             android_home=args.android_home,
             android_ndk_home=args.android_ndk_home,
             android_ndk_home=args.android_ndk_home,
-            android_abis=args.android_abis,
         )
         )
-
-
     with section_printer.group("Summary"):
     with section_printer.group("Summary"):
         print(f"artifacts = {releaser.artifacts}")
         print(f"artifacts = {releaser.artifacts}")
 
 
     if args.github:
     if args.github:
-        if args.dry:
-            os.environ["GITHUB_OUTPUT"] = "/tmp/github_output.txt"
         with open(os.environ["GITHUB_OUTPUT"], "a") as f:
         with open(os.environ["GITHUB_OUTPUT"], "a") as f:
             f.write(f"project={releaser.project}\n")
             f.write(f"project={releaser.project}\n")
             f.write(f"version={releaser.version}\n")
             f.write(f"version={releaser.version}\n")

+ 4 - 1
build-scripts/create-android-project.py

@@ -45,6 +45,9 @@ def android_mk_use_prefab(path: Path) -> None:
 
 
     data, _ = re.subn("[\n]{3,}", "\n\n", data)
     data, _ = re.subn("[\n]{3,}", "\n\n", data)
 
 
+    data, count = re.subn(r"(LOCAL_SHARED_LIBRARIES\s*:=\s*SDL3)", "LOCAL_SHARED_LIBRARIES := SDL3 SDL3-Headers", data)
+    assert count == 1, f"Must have injected SDL3-Headers in {path} exactly once"
+
     newdata = data + textwrap.dedent("""
     newdata = data + textwrap.dedent("""
         # https://google.github.io/prefab/build-systems.html
         # https://google.github.io/prefab/build-systems.html
 
 
@@ -116,7 +119,7 @@ def main() -> int:
     description = "Create a simple Android gradle project from input sources."
     description = "Create a simple Android gradle project from input sources."
     epilog = textwrap.dedent("""\
     epilog = textwrap.dedent("""\
         You need to manually copy a prebuilt SDL3 Android archive into the project tree when using the aar variant.
         You need to manually copy a prebuilt SDL3 Android archive into the project tree when using the aar variant.
-        
+
         Any changes you have done to the sources in the Android project will be lost
         Any changes you have done to the sources in the Android project will be lost
     """)
     """)
     parser = ArgumentParser(description=description, epilog=epilog, allow_abbrev=False)
     parser = ArgumentParser(description=description, epilog=epilog, allow_abbrev=False)

+ 43 - 0
build-scripts/create-release.py

@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+import argparse
+from pathlib import Path
+import json
+import logging
+import re
+import subprocess
+
+ROOT = Path(__file__).resolve().parents[1]
+
+
+def determine_remote() -> str:
+    text = (ROOT / "build-scripts/release-info.json").read_text()
+    release_info = json.loads(text)
+    if "remote" in release_info:
+        return release_info["remote"]
+    project_with_version = release_info["name"]
+    project, _ = re.subn("([^a-zA-Z_])", "", project_with_version)
+    return f"libsdl-org/{project}"
+
+
+def main():
+    default_remote = determine_remote()
+
+    current_commit = subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=ROOT, text=True).strip()
+
+    parser = argparse.ArgumentParser(allow_abbrev=False)
+    parser.add_argument("--ref", required=True, help=f"Name of branch or tag containing release.yml")
+    parser.add_argument("--remote", "-R", default=default_remote, help=f"Remote repo (default={default_remote})")
+    parser.add_argument("--commit", default=current_commit, help=f"Commit (default={current_commit})")
+    args = parser.parse_args()
+
+
+    print(f"Running release.yml workflow:")
+    print(f"  commit = {args.commit}")
+    print(f"  remote = {args.remote}")
+
+    subprocess.check_call(["gh", "-R", args.remote, "workflow", "run", "release.yml", "--ref", args.ref, "-f", f"commit={args.commit}"], cwd=ROOT)
+
+
+if __name__ == "__main__":
+    raise SystemExit(main())
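A small worked example of the fallback in determine_remote(): when release-info.json does not provide an explicit "remote", every non-letter character is stripped from the project name to derive the GitHub repository. With this commit's release-info.json the "remote" key is present, so the fallback only acts as a safety net:

```python
import re

# "SDL3" -> "SDL": every character that is not a letter or underscore is removed,
# and the default remote becomes "libsdl-org/SDL".
project_with_version = "SDL3"
project, _ = re.subn("([^a-zA-Z_])", "", project_with_version)
print(f"libsdl-org/{project}")  # prints "libsdl-org/SDL"
```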

+ 0 - 6
build-scripts/create-release.sh

@@ -1,6 +0,0 @@
-#!/bin/sh
-
-commit=$(git rev-parse HEAD)
-echo "Creating release workflow for commit $commit"
-gh workflow run release.yml --ref main -f commit=$commit
-

+ 61 - 58
build-scripts/pkg-support/android/INSTALL.md.in

@@ -1,58 +1,61 @@
-
-This Android archive allows use of @PROJECT_NAME@ in your Android project, without needing to copy any SDL source.
-For integration with CMake/ndk-build, it uses [prefab](https://google.github.io/prefab/).
-
-Copy this archive (@PROJECT_NAME@-@PROJECT_VERSION@.aar) to a `app/libs` directory of your project.
-
-In `app/build.gradle` of your Android project, add:
-```
-android {
-    /* ... */
-    buildFeatures {
-        prefab true
-    }
-}
-dependencies {
-    implementation files('libs/@PROJECT_NAME@-@PROJECT_VERSION@.aar')
-    /* ... */
-}
-```
-
-If you're using CMake, add the following to your CMakeLists.txt:
-```
-find_package(@PROJECT_NAME@ REQUIRED CONFIG)
-target_link_libraries(yourgame PRIVATE @PROJECT_NAME@::@PROJECT_NAME@)
-```
-
-If you're using ndk-build, add the following somewhere after `LOCAL_MODULE := yourgame` to your `Android.mk` or `Application.mk`:
-```
-# https://google.github.io/prefab/build-systems.html
-
-# Add the prefab modules to the import path.
-$(call import-add-path,/out)
-
-# Import @PROJECT_NAME@ so we can depend on it.
-$(call import-module,prefab/@PROJECT_NAME@)
-```
-
----
-
-For advanced users:
-
-If you want to build a 3rd party library outside Gradle,
-running the following command will extract the Android archive into a more common directory structure.
-```
-python @PROJECT_NAME@-@PROJECT_VERSION@.aar -o android_prefix
-```
-Add `--help` for a list of all available options.
-
-
-Look at the example programs in ./test (of the source archive), and check out online documentation:
-    https://wiki.libsdl.org/SDL3/FrontPage
-
-Join the SDL discourse server if you want to join the community:
-    https://discourse.libsdl.org/
-
-
-That's it!
-Sam Lantinga <slouken@libsdl.org>
+This Android archive allows use of @<@PROJECT_NAME@>@ in your Android project, without needing to copy any SDL source.
+For integration with CMake/ndk-build, it uses [prefab](https://google.github.io/prefab/).
+
+Copy this archive (@<@PROJECT_NAME@>@-@<@PROJECT_VERSION@>@.aar) to the `app/libs` directory of your project.
+
+In `app/build.gradle` of your Android project, add:
+```
+android {
+    /* ... */
+    buildFeatures {
+        prefab true
+    }
+}
+dependencies {
+    implementation files('libs/@<@PROJECT_NAME@>@-@<@PROJECT_VERSION@>@.aar')
+    /* ... */
+}
+```
+
+If you're using CMake, add the following to your CMakeLists.txt:
+```
+find_package(@<@PROJECT_NAME@>@ REQUIRED CONFIG)
+target_link_libraries(yourgame PRIVATE @<@PROJECT_NAME@>@::@<@PROJECT_NAME@>@)
+```
+
+If you use ndk-build, add the following before `include $(BUILD_SHARED_LIBRARY)` to your `Android.mk`:
+```
+LOCAL_SHARED_LIBRARIES := SDL3 SDL3-Headers
+```
+And add the following at the bottom:
+```
+# https://google.github.io/prefab/build-systems.html
+
+# Add the prefab modules to the import path.
+$(call import-add-path,/out)
+
+# Import @<@PROJECT_NAME@>@ so we can depend on it.
+$(call import-module,prefab/@<@PROJECT_NAME@>@)
+```
+
+---
+
+For advanced users:
+
+If you want to build a 3rd party library outside Gradle,
+running the following command will extract the Android archive into a more common directory structure.
+```
+python @<@PROJECT_NAME@>@-@<@PROJECT_VERSION@>@.aar -o android_prefix
+```
+Add `--help` for a list of all available options.
+
+
+Look at the example programs in ./test (of the source archive), and check out online documentation:
+    https://wiki.libsdl.org/SDL3/FrontPage
+
+Join the SDL Discourse server if you want to be part of the community:
+    https://discourse.libsdl.org/
+
+
+That's it!
+Sam Lantinga <slouken@libsdl.org>

+ 7 - 7
build-scripts/pkg-support/android/__main__.py.in

@@ -1,12 +1,12 @@
 #!/usr/bin/env python
 #!/usr/bin/env python
 
 
 """
 """
-Create a @PROJECT_NAME@ SDK prefix from an Android archive
+Create a @<@PROJECT_NAME@>@ SDK prefix from an Android archive
 This file is meant to be placed in the root of an Android .aar archive
 This file is meant to be placed in the root of an Android .aar archive
 
 
 Example usage:
 Example usage:
 ```sh
 ```sh
-python @PROJECT_NAME@-@PROJECT_VERSION@.aar -o /usr/opt/android-sdks
+python @<@PROJECT_NAME@>@-@<@PROJECT_VERSION@>@.aar -o /usr/opt/android-sdks
 cmake -S my-project \
 cmake -S my-project \
     -DCMAKE_PREFIX_PATH=/usr/opt/android-sdks \
     -DCMAKE_PREFIX_PATH=/usr/opt/android-sdks \
     -DCMAKE_TOOLCHAIN_FILE=$ANDROID_NDK_HOME/build/cmake/android.toolchain.cmake \
     -DCMAKE_TOOLCHAIN_FILE=$ANDROID_NDK_HOME/build/cmake/android.toolchain.cmake \
@@ -31,13 +31,14 @@ ANDROID_ARCHS = { "armeabi-v7a", "arm64-v8a", "x86", "x86_64" }
 
 
 def main():
 def main():
     parser = argparse.ArgumentParser(
     parser = argparse.ArgumentParser(
-        description="Convert a @PROJECT_NAME@ Android .aar archive into a SDK",
+        description="Convert a @<@PROJECT_NAME@>@ Android .aar archive into a SDK",
         allow_abbrev=False,
         allow_abbrev=False,
     )
     )
+    parser.add_argument("--version", action="version", version="@<@PROJECT_NAME@>@ @<@PROJECT_VERSION@>@")
     parser.add_argument("-o", dest="output", type=pathlib.Path, required=True, help="Folder where to store the SDK")
     parser.add_argument("-o", dest="output", type=pathlib.Path, required=True, help="Folder where to store the SDK")
     args = parser.parse_args()
     args = parser.parse_args()
 
 
-    print(f"Creating a @PROJECT_NAME@ SDK at {args.output}...")
+    print(f"Creating a @<@PROJECT_NAME@>@ SDK at {args.output}...")
 
 
     prefix = args.output
     prefix = args.output
     incdir = prefix / "include"
     incdir = prefix / "include"
@@ -92,9 +93,8 @@ def main():
                 jarpath = javadir / f"{project_name}-{project_version}-sources.jar"
                 jarpath = javadir / f"{project_name}-{project_version}-sources.jar"
                 read_zipfile_and_write(jarpath, zippath)
                 read_zipfile_and_write(jarpath, zippath)
             elif zippath == "classes-doc.jar":
             elif zippath == "classes-doc.jar":
-                data = zf.read(zippath)
-                with zipfile.ZipFile(io.BytesIO(data)) as doc_zf:
-                    doc_zf.extractall(javadocdir)
+                jarpath = javadocdir / f"{project_name}-{project_version}-javadoc.jar"
+                read_zipfile_and_write(jarpath, zippath)
 
 
     print("... done")
     print("... done")
     return 0
     return 0

+ 3 - 22
build-scripts/pkg-support/android/cmake/SDL3ConfigVersion.cmake → build-scripts/pkg-support/android/cmake/SDL3ConfigVersion.cmake.in

@@ -1,26 +1,7 @@
-# based on the files generated by CMake's write_basic_package_version_file
+# @<@PROJECT_NAME@>@ CMake version configuration file:
+# This file is meant to be placed in a lib/cmake/@<@PROJECT_NAME@>@ subfolder of a reconstructed Android SDL3 SDK
 
 
-# SDL CMake version configuration file:
-# This file is meant to be placed in a lib/cmake/SDL3 subfolder of a reconstructed Android SDL3 SDK
-
-if(NOT EXISTS "${CMAKE_CURRENT_LIST_DIR}/../../../include/SDL3/SDL_version.h")
-    message(AUTHOR_WARNING "Could not find SDL3/SDL_version.h. This script is meant to be placed in the root of SDL3-devel-3.x.y-VC")
-    return()
-endif()
-
-file(READ "${CMAKE_CURRENT_LIST_DIR}/../../../include/SDL3/SDL_version.h" _sdl_version_h)
-string(REGEX MATCH "#define[ \t]+SDL_MAJOR_VERSION[ \t]+([0-9]+)" _sdl_major_re "${_sdl_version_h}")
-set(_sdl_major "${CMAKE_MATCH_1}")
-string(REGEX MATCH "#define[ \t]+SDL_MINOR_VERSION[ \t]+([0-9]+)" _sdl_minor_re "${_sdl_version_h}")
-set(_sdl_minor "${CMAKE_MATCH_1}")
-string(REGEX MATCH "#define[ \t]+SDL_MICRO_VERSION[ \t]+([0-9]+)" _sdl_micro_re "${_sdl_version_h}")
-set(_sdl_micro "${CMAKE_MATCH_1}")
-if(_sdl_major_re AND _sdl_minor_re AND _sdl_micro_re)
-    set(PACKAGE_VERSION "${_sdl_major}.${_sdl_minor}.${_sdl_micro}")
-else()
-    message(AUTHOR_WARNING "Could not extract version from SDL3/SDL_version.h.")
-    return()
-endif()
+set(PACKAGE_VERSION "@<@PROJECT_VERSION@>@")
 
 
 if(PACKAGE_FIND_VERSION_RANGE)
 if(PACKAGE_FIND_VERSION_RANGE)
     # Package version must be in the requested version range
     # Package version must be in the requested version range

+ 5 - 0
build-scripts/pkg-support/android/description.json.in

@@ -0,0 +1,5 @@
+{
+    "name": "@<@PROJECT_NAME@>@",
+    "version": "@<@PROJECT_VERSION@>@",
+    "git-hash": "@<@PROJECT_COMMIT@>@"
+}

+ 2 - 0
build-scripts/pkg-support/mingw/Makefile

@@ -26,6 +26,8 @@ install-x86_64:
 
 
 install-all:
 install-all:
 	@if test -d $(DESTDIR); then \
 	@if test -d $(DESTDIR); then \
+		mkdir -p $(DESTDIR)/cmake; \
+		cp -rv cmake/* $(DESTDIR)/cmake; \
 		for arch in $(ARCHITECTURES); do \
 		for arch in $(ARCHITECTURES); do \
 			$(MAKE) install ARCH=$$arch DESTDIR=$(DESTDIR)/$$arch; \
 			$(MAKE) install ARCH=$$arch DESTDIR=$(DESTDIR)/$$arch; \
 		done \
 		done \

+ 8 - 0
build-scripts/pkg-support/msvc/Directory.Build.props

@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <ItemDefinitionGroup>
+    <ClCompile>
+      <PreprocessorDefinitions>SDL_VENDOR_INFO="libsdl.org";%(PreprocessorDefinitions)</PreprocessorDefinitions>
+    </ClCompile>
+  </ItemDefinitionGroup>
+</Project>

+ 3 - 3
VisualC/pkg-support/cmake/sdl3-config.cmake → build-scripts/pkg-support/msvc/cmake/SDL3Config.cmake.in

@@ -1,5 +1,5 @@
-# SDL CMake configuration file:
-# This file is meant to be placed in a cmake subfolder of SDL3-devel-3.x.y-VC
+# @<@PROJECT_NAME@>@ CMake configuration file:
+# This file is meant to be placed in a cmake subfolder of @<@PROJECT_NAME@>@-devel-@<@PROJECT_VERSION@>@-VC.zip
 
 
 cmake_minimum_required(VERSION 3.0...3.5)
 cmake_minimum_required(VERSION 3.0...3.5)
 
 
@@ -105,7 +105,7 @@ else()
 endif()
 endif()
 unset(_sdl3test_lib)
 unset(_sdl3test_lib)
 
 
-if(SDL3_SDL3-shared_FOUND)
+if(SDL3_SDL3-shared_FOUND OR SDL3_SDL3-static_FOUND)
     set(SDL3_SDL3_FOUND TRUE)
     set(SDL3_SDL3_FOUND TRUE)
 endif()
 endif()
 
 

+ 3 - 22
VisualC/pkg-support/cmake/sdl3-config-version.cmake → build-scripts/pkg-support/msvc/cmake/SDL3ConfigVersion.cmake.in

@@ -1,26 +1,7 @@
-# based on the files generated by CMake's write_basic_package_version_file
+# @<@PROJECT_NAME@>@ CMake version configuration file:
+# This file is meant to be placed in a cmake subfolder of @<@PROJECT_NAME@>@-devel-@<@PROJECT_VERSION@>@-VC.zip
 
 
-# SDL CMake version configuration file:
-# This file is meant to be placed in a cmake subfolder of SDL3-devel-3.x.y-VC
-
-if(NOT EXISTS "${CMAKE_CURRENT_LIST_DIR}/../include/SDL3/SDL_version.h")
-    message(AUTHOR_WARNING "Could not find SDL3/SDL_version.h. This script is meant to be placed in the root of SDL3-devel-3.x.y-VC")
-    return()
-endif()
-
-file(READ "${CMAKE_CURRENT_LIST_DIR}/../include/SDL3/SDL_version.h" _sdl_version_h)
-string(REGEX MATCH "#define[ \t]+SDL_MAJOR_VERSION[ \t]+([0-9]+)" _sdl_major_re "${_sdl_version_h}")
-set(_sdl_major "${CMAKE_MATCH_1}")
-string(REGEX MATCH "#define[ \t]+SDL_MINOR_VERSION[ \t]+([0-9]+)" _sdl_minor_re "${_sdl_version_h}")
-set(_sdl_minor "${CMAKE_MATCH_1}")
-string(REGEX MATCH "#define[ \t]+SDL_MICRO_VERSION[ \t]+([0-9]+)" _sdl_micro_re "${_sdl_version_h}")
-set(_sdl_micro "${CMAKE_MATCH_1}")
-if(_sdl_major_re AND _sdl_minor_re AND _sdl_micro_re)
-    set(PACKAGE_VERSION "${_sdl_major}.${_sdl_minor}.${_sdl_micro}")
-else()
-    message(AUTHOR_WARNING "Could not extract version from SDL3/SDL_version.h.")
-    return()
-endif()
+set(PACKAGE_VERSION "@<@PROJECT_VERSION@>@")
 
 
 if(PACKAGE_FIND_VERSION_RANGE)
 if(PACKAGE_FIND_VERSION_RANGE)
     # Package version must be in the requested version range
     # Package version must be in the requested version range

+ 41 - 0
build-scripts/pkg-support/source/SDL_revision.h.cmake.in

@@ -0,0 +1,41 @@
+/*
+ Simple DirectMedia Layer
+ Copyright (C) 1997-2024 Sam Lantinga <slouken@libsdl.org>
+
+ This software is provided 'as-is', without any express or implied
+ warranty.  In no event will the authors be held liable for any damages
+ arising from the use of this software.
+
+ Permission is granted to anyone to use this software for any purpose,
+ including commercial applications, and to alter it and redistribute it
+ freely, subject to the following restrictions:
+
+ 1. The origin of this software must not be misrepresented; you must not
+    claim that you wrote the original software. If you use this software
+    in a product, an acknowledgment in the product documentation would be
+    appreciated but is not required.
+ 2. Altered source versions must be plainly marked as such, and must not be
+    misrepresented as being the original software.
+ 3. This notice may not be removed or altered from any source distribution.
+*/
+
+/* WIKI CATEGORY: Version */
+
+/*
+ * SDL_revision.h contains the SDL revision, which might be defined on the
+ * compiler command line, or generated right into the header itself by the
+ * build system.
+ */
+
+#ifndef SDL_revision_h_
+#define SDL_revision_h_
+
+#cmakedefine SDL_VENDOR_INFO "@SDL_VENDOR_INFO@"
+
+#if defined(SDL_VENDOR_INFO)
+#define SDL_REVISION "@<@PROJECT_REVISION@>@ (" SDL_VENDOR_INFO ")"
+#else
+#define SDL_REVISION "@<@PROJECT_REVISION@>@"
+#endif
+
+#endif /* SDL_revision_h_ */

+ 56 - 0
build-scripts/pkg-support/source/SDL_revision.h.in

@@ -0,0 +1,56 @@
+/*
+ Simple DirectMedia Layer
+ Copyright (C) 1997-2024 Sam Lantinga <slouken@libsdl.org>
+
+ This software is provided 'as-is', without any express or implied
+ warranty.  In no event will the authors be held liable for any damages
+ arising from the use of this software.
+
+ Permission is granted to anyone to use this software for any purpose,
+ including commercial applications, and to alter it and redistribute it
+ freely, subject to the following restrictions:
+
+ 1. The origin of this software must not be misrepresented; you must not
+    claim that you wrote the original software. If you use this software
+    in a product, an acknowledgment in the product documentation would be
+    appreciated but is not required.
+ 2. Altered source versions must be plainly marked as such, and must not be
+    misrepresented as being the original software.
+ 3. This notice may not be removed or altered from any source distribution.
+*/
+
+/* WIKI CATEGORY: Version */
+
+/*
+ * SDL_revision.h contains the SDL revision, which might be defined on the
+ * compiler command line, or generated right into the header itself by the
+ * build system.
+ */
+
+#ifndef SDL_revision_h_
+#define SDL_revision_h_
+
+#ifdef SDL_WIKI_DOCUMENTATION_SECTION
+
+/**
+ * This macro is a string describing the source at a particular point in
+ * development.
+ *
+ * This string is often generated from revision control's state at build time.
+ *
+ * This string can be quite complex and does not follow any standard. For
+ * example, it might be something like "SDL-prerelease-3.1.1-47-gf687e0732".
+ * It might also be user-defined at build time, so it's best to treat it as a
+ * clue in debugging forensics and not something the app will parse in any
+ * way.
+ *
+ * \since This macro is available since SDL 3.0.0.
+ */
+#define SDL_REVISION "Some arbitrary string decided at SDL build time"
+#elif defined(SDL_VENDOR_INFO)
+#define SDL_REVISION "@<@PROJECT_REVISION@>@ (" SDL_VENDOR_INFO ")"
+#else
+#define SDL_REVISION "@<@PROJECT_REVISION@>@"
+#endif
+
+#endif /* SDL_revision_h_ */

+ 215 - 0
build-scripts/release-info.json

@@ -0,0 +1,215 @@
+{
+  "name": "SDL3",
+  "remote": "libsdl-org/SDL",
+  "version": {
+    "file": "include/SDL3/SDL_version.h",
+    "re_major": "^#define SDL_MAJOR_VERSION\\s+([0-9]+)$",
+    "re_minor": "^#define SDL_MINOR_VERSION\\s+([0-9]+)$",
+    "re_micro": "^#define SDL_MICRO_VERSION\\s+([0-9]+)$"
+  },
+  "source": {
+    "checks": [
+      "src/SDL.c",
+      "include/SDL3/SDL.h",
+      "test/testsprite.c",
+      "android-project/app/src/main/java/org/libsdl/app/SDLActivity.java"
+    ],
+    "files": {
+      "include/SDL3": [
+        "build-scripts/pkg-support/source/SDL_revision.h.in:SDL_revision.h"
+      ],
+      "include/build_config": [
+        "build-scripts/pkg-support/source/SDL_revision.h.cmake.in:SDL_revision.h.cmake"
+      ]
+    }
+  },
+  "dmg": {
+    "project": "Xcode/SDL/SDL.xcodeproj",
+    "path": "Xcode/SDL/build/SDL3.dmg",
+    "target": "SDL3.dmg",
+    "build-xcconfig": "Xcode/SDL/pkg-support/build.xcconfig"
+  },
+  "mingw": {
+    "cmake": {
+      "archs": ["x86", "x64"],
+      "args": [
+        "-DSDL_SHARED=ON",
+        "-DSDL_STATIC=ON",
+        "-DSDL_DISABLE_INSTALL_DOCS=ON",
+        "-DSDL_TEST_LIBRARY=ON",
+        "-DSDL_VENDOR_INFO=libsdl.org",
+        "-DSDL_TESTS=OFF"
+      ],
+      "shared-static": "args"
+    },
+    "files": {
+      "": [
+        "build-scripts/pkg-support/mingw/INSTALL.txt",
+        "build-scripts/pkg-support/mingw/Makefile",
+        "BUGS.txt",
+        "CREDITS.md",
+        "README-SDL.txt",
+        "WhatsNew.txt",
+        "LICENSE.txt",
+        "README.md"
+      ],
+      "cmake": [
+        "build-scripts/pkg-support/mingw/cmake/SDL3Config.cmake",
+        "build-scripts/pkg-support/mingw/cmake/SDL3ConfigVersion.cmake"
+      ],
+      "docs": [
+        "docs/*"
+      ],
+      "test": [
+        "test/*"
+      ]
+    }
+  },
+  "msvc": {
+    "msbuild": {
+      "archs": [
+        "x86",
+        "x64"
+      ],
+      "directory-build-props": "build-scripts/pkg-support/msvc/Directory.Build.props",
+      "projects": [
+        "VisualC/SDL/SDL.vcxproj",
+        "VisualC/SDL_test/SDL_test.vcxproj"
+      ],
+      "files-lib": {
+        "": [
+          "VisualC/SDL/@<@PLATFORM@>@/@<@CONFIGURATION@>@/SDL3.dll"
+        ]
+      },
+      "files-devel": {
+        "lib/@<@ARCH@>@": [
+          "VisualC/SDL/@<@PLATFORM@>@/@<@CONFIGURATION@>@/SDL3.dll",
+          "VisualC/SDL/@<@PLATFORM@>@/@<@CONFIGURATION@>@/SDL3.lib",
+          "VisualC/SDL/@<@PLATFORM@>@/@<@CONFIGURATION@>@/SDL3.pdb",
+          "VisualC/SDL_test/@<@PLATFORM@>@/@<@CONFIGURATION@>@/SDL3_test.lib"
+        ]
+      }
+    },
+    "cmake": {
+      "archs": [
+        "arm64"
+      ],
+      "args": [
+        "-DSDL_SHARED=ON",
+        "-DSDL_STATIC=OFF",
+        "-DSDL_TEST_LIBRARY=ON",
+        "-DSDL_TESTS=OFF",
+        "-DSDL_DISABLE_INSTALL_DOCS=ON",
+        "-DSDL_VENDOR_INFO=libsdl.org"
+      ],
+      "files-lib": {
+        "": [
+          "bin/SDL3.dll"
+        ]
+      },
+      "files-devel": {
+        "lib/@<@ARCH@>@": [
+          "bin/SDL3.dll",
+          "bin/SDL3.pdb",
+          "lib/SDL3.lib",
+          "lib/SDL3_test.lib"
+        ]
+      }
+    },
+    "files-lib": {
+      "": [
+        "README-SDL.txt"
+      ]
+    },
+    "files-devel": {
+      "": [
+        "README-SDL.txt",
+        "BUGS.txt",
+        "LICENSE.txt",
+        "README.md",
+        "WhatsNew.txt"
+      ],
+      "cmake": [
+        "build-scripts/pkg-support/msvc/cmake/SDL3Config.cmake.in:SDL3Config.cmake",
+        "build-scripts/pkg-support/msvc/cmake/SDL3ConfigVersion.cmake.in:SDL3ConfigVersion.cmake",
+        "cmake/sdlcpu.cmake"
+      ],
+      "docs": [
+        "docs/*"
+      ],
+      "include/SDL3": [
+        "include/SDL3/*.h"
+      ]
+    }
+  },
+  "android": {
+    "cmake": {
+      "args": [
+        "-DSDL_SHARED=ON",
+        "-DSDL_STATIC=OFF",
+        "-DSDL_TEST_LIBRARY=ON",
+        "-DSDL_TESTS=OFF",
+        "-DSDL_DISABLE_ANDROID_JAR=OFF",
+        "-DSDL_DISABLE_INSTALL=OFF",
+        "-DSDL_DISABLE_INSTALL_DOCS=OFF",
+        "-DSDL_VENDOR_INFO=libsdl.org"
+      ]
+    },
+    "modules": {
+      "SDL3-Headers": {
+        "type": "interface",
+        "includes": {
+          "SDL3": ["include/SDL3/*.h"]
+        }
+      },
+      "Headers": {
+        "type": "interface",
+        "export-libraries": [":SDL3-Headers"]
+      },
+      "SDL3_test": {
+        "type": "library",
+        "library": "lib/libSDL3_test.a",
+        "export-libraries": [":Headers"]
+      },
+      "SDL3-shared": {
+        "type": "library",
+        "library": "lib/libSDL3.so",
+        "export-libraries": [":Headers"]
+      },
+      "SDL3": {
+        "type": "interface",
+        "export-libraries": [":SDL3-shared"]
+      }
+    },
+    "jars": {
+      "classes": "share/java/@<@PROJECT_NAME@>@/@<@PROJECT_NAME@>@-@<@PROJECT_VERSION@>@.jar",
+      "sources": "share/java/@<@PROJECT_NAME@>@/@<@PROJECT_NAME@>@-@<@PROJECT_VERSION@>@-sources.jar",
+      "doc": "share/javadoc/@<@PROJECT_NAME@>@/@<@PROJECT_NAME@>@-@<@PROJECT_VERSION@>@-javadoc.jar"
+    },
+    "abis": [
+      "armeabi-v7a",
+      "arm64-v8a",
+      "x86",
+      "x86_64"
+    ],
+    "api-minimum": 19,
+    "api-target": 29,
+    "ndk-minimum": 21,
+    "files": {
+      "": [
+        "android-project/app/proguard-rules.pro:proguard.txt",
+        "build-scripts/pkg-support/android/INSTALL.md.in:INSTALL.md",
+        "build-scripts/pkg-support/android/__main__.py.in:__main__.py",
+        "build-scripts/pkg-support/android/description.json.in:description.json"
+      ],
+      "META-INF": [
+        "LICENSE.txt"
+      ],
+      "cmake": [
+        "cmake/sdlcpu.cmake",
+        "build-scripts/pkg-support/android/cmake/SDL3Config.cmake",
+        "build-scripts/pkg-support/android/cmake/SDL3ConfigVersion.cmake.in:SDL3ConfigVersion.cmake"
+      ]
+    }
+  }
+}
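Two conventions recur throughout this file: `@<@KEY@>@` placeholders (for example `@<@PROJECT_VERSION@>@` or `@<@ARCH@>@`) that build-release.py fills in from its context when packaging, and `source:destination` entries in the `files` mappings that rename a file on its way into an archive. A minimal sketch of how such entries could be interpreted, assuming exactly those two conventions (the authoritative implementation is `configure_text` and the file-mapping code in build-release.py):

```python
import re

def expand_placeholders(text: str, context: dict) -> str:
    # Replace every @<@KEY@>@ occurrence with its value from the context mapping.
    return re.sub(r"@<@([A-Z_]+)@>@", lambda m: context[m.group(1)], text)

def split_mapping_entry(entry: str) -> tuple:
    # "path/to/source.in:dest-name" -> ("path/to/source.in", "dest-name");
    # an entry without ":" keeps its own basename as the destination name.
    src, sep, dst = entry.partition(":")
    return (src, dst) if sep else (src, src.rsplit("/", 1)[-1])

# Illustrative context values only; the real values are derived at build time.
context = {"ARCH": "x64", "PLATFORM": "x64", "CONFIGURATION": "Release"}
print(expand_placeholders("VisualC/SDL/@<@PLATFORM@>@/@<@CONFIGURATION@>@/SDL3.dll", context))
print(split_mapping_entry("build-scripts/pkg-support/source/SDL_revision.h.in:SDL_revision.h"))
```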

+ 0 - 1
build-scripts/updaterev.sh

@@ -40,7 +40,6 @@ if [ "$rev" != "" ]; then
     echo "#else" >>"$header.new"
     echo "#else" >>"$header.new"
     echo "#define SDL_REVISION \"SDL-$rev\"" >>"$header.new"
     echo "#define SDL_REVISION \"SDL-$rev\"" >>"$header.new"
     echo "#endif" >>"$header.new"
     echo "#endif" >>"$header.new"
-    echo "#define SDL_REVISION_NUMBER 0" >>"$header.new"
     if diff $header $header.new >/dev/null 2>&1; then
     if diff $header $header.new >/dev/null 2>&1; then
         rm "$header.new"
         rm "$header.new"
     else
     else

+ 3 - 2
cmake/android/FindSdlAndroidPlatform.cmake

@@ -104,8 +104,9 @@ endfunction()
 set(SDL_ANDROID_PLATFORM_ANDROID_JAR "SDL_ANDROID_PLATFORM_ANDROID_JAR-NOTFOUND")
 set(SDL_ANDROID_PLATFORM_ANDROID_JAR "SDL_ANDROID_PLATFORM_ANDROID_JAR-NOTFOUND")
 
 
 if(NOT DEFINED SDL_ANDROID_PLATFORM_ROOT)
 if(NOT DEFINED SDL_ANDROID_PLATFORM_ROOT)
-  _sdl_find_android_platform_root(SDL_ANDROID_PLATFORM_ROOT)
-  set(SDL_ANDROID_PLATFORM_ROOT "${SDL_ANDROID_PLATFORM_ROOT}" CACHE PATH "Path of Android platform")
+  _sdl_find_android_platform_root(_new_sdl_android_platform_root)
+  set(SDL_ANDROID_PLATFORM_ROOT "${_new_sdl_android_platform_root}" CACHE PATH "Path of Android platform")
+  unset(_new_sdl_android_platform_root)
 endif()
 endif()
 if(SDL_ANDROID_PLATFORM_ROOT)
 if(SDL_ANDROID_PLATFORM_ROOT)
   _sdl_is_valid_android_platform_root(_valid SDL_ANDROID_PLATFORM_VERSION "${SDL_ANDROID_PLATFORM_ROOT}")
   _sdl_is_valid_android_platform_root(_valid SDL_ANDROID_PLATFORM_VERSION "${SDL_ANDROID_PLATFORM_ROOT}")

+ 1 - 1
cmake/sdlcompilers.cmake

@@ -46,7 +46,7 @@ function(SDL_AddCommonCompilerFlags TARGET)
       cmake_push_check_state()
       cmake_push_check_state()
       check_c_compiler_flag("-gdwarf-4" HAVE_GDWARF_4)
       check_c_compiler_flag("-gdwarf-4" HAVE_GDWARF_4)
       if(HAVE_GDWARF_4)
       if(HAVE_GDWARF_4)
-        target_compile_options(${TARGET} PRIVATE "-gdwarf-4")
+        target_compile_options(${TARGET} PRIVATE "$<$<OR:$<CONFIG:Debug>,$<CONFIG:RelWithDebInfo>>:-gdwarf-4>")
       endif()
       endif()
       cmake_pop_check_state()
       cmake_pop_check_state()
     endif()
     endif()

+ 156 - 148
cmake/sdlcpu.cmake

@@ -1,148 +1,156 @@
-function(SDL_DetectTargetCPUArchitectures DETECTED_ARCHS)
-
-  set(known_archs EMSCRIPTEN ARM32 ARM64 ARM64EC LOONGARCH64 POWERPC32 POWERPC64 X86 X64)
-
-  if(APPLE AND CMAKE_OSX_ARCHITECTURES)
-    foreach(known_arch IN LISTS known_archs)
-      set(SDL_CPU_${known_arch} "0")
-    endforeach()
-    set(detected_archs)
-    foreach(osx_arch IN LISTS CMAKE_OSX_ARCHITECTURES)
-      if(osx_arch STREQUAL "x86_64")
-        set(SDL_CPU_X64 "1")
-        list(APPEND detected_archs "X64")
-      elseif(osx_arch STREQUAL "arm64")
-        set(SDL_CPU_ARM64 "1")
-        list(APPEND detected_archs "ARM64")
-      endif()
-    endforeach()
-    set("${DETECTED_ARCHS}" "${detected_archs}" PARENT_SCOPE)
-    return()
-  endif()
-
-  set(detected_archs)
-  foreach(known_arch IN LISTS known_archs)
-    if(SDL_CPU_${known_arch})
-      list(APPEND detected_archs "${known_arch}")
-    endif()
-  endforeach()
-
-  if(detected_archs)
-    set("${DETECTED_ARCHS}" "${detected_archs}" PARENT_SCOPE)
-    return()
-  endif()
-
-  set(arch_check_ARM32 "defined(__arm__) || defined(_M_ARM)")
-  set(arch_check_ARM64 "defined(__aarch64__) || defined(_M_ARM64)")
-  set(arch_check_ARM64EC "defined(_M_ARM64EC)")
-  set(arch_check_EMSCRIPTEN "defined(__EMSCRIPTEN__)")
-  set(arch_check_LOONGARCH64 "defined(__loongarch64)")
-  set(arch_check_POWERPC32 "(defined(__PPC__) || defined(__powerpc__)) && !defined(__powerpc64__)")
-  set(arch_check_POWERPC64 "defined(__PPC64__) || defined(__powerpc64__)")
-  set(arch_check_X86 "defined(__i386__) || defined(__i486__) || defined(__i586__) || defined(__i686__) ||defined( __i386) || defined(_M_IX86)")
-  set(arch_check_X64 "(defined(__amd64__) || defined(__amd64) || defined(__x86_64__) || defined(__x86_64) || defined(_M_X64) || defined(_M_AMD64)) && !defined(_M_ARM64EC)")
-
-  set(src_vars "")
-  set(src_main "")
-  foreach(known_arch IN LISTS known_archs)
-    set(detected_${known_arch} "0")
-
-    string(APPEND src_vars "
-#if ${arch_check_${known_arch}}
-#define ARCH_${known_arch} \"1\"
-#else
-#define ARCH_${known_arch} \"0\"
-#endif
-const char *arch_${known_arch} = \"INFO<${known_arch}=\" ARCH_${known_arch} \">\";
-")
-    string(APPEND src_main "
-  result += arch_${known_arch}[argc];")
-  endforeach()
-
-  set(src_arch_detect "${src_vars}
-int main(int argc, char *argv[]) {
-  (void)argv;
-  int result = 0;
-${src_main}
-  return result;
-}")
-
-  set(path_src_arch_detect "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/CMakeTmp/SDL_detect_arch.c")
-  file(WRITE "${path_src_arch_detect}" "${src_arch_detect}")
-  set(path_dir_arch_detect "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/CMakeTmp/SDL_detect_arch")
-  set(path_bin_arch_detect "${path_dir_arch_detect}/bin")
-
-  set(detected_archs)
-
-  set(msg "Detecting Target CPU Architecture")
-  message(STATUS "${msg}")
-
-  include(CMakePushCheckState)
-
-  set(CMAKE_TRY_COMPILE_TARGET_TYPE "STATIC_LIBRARY")
-
-  cmake_push_check_state(RESET)
-  try_compile(SDL_CPU_CHECK_ALL
-    "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/CMakeTmp/SDL_detect_arch"
-    SOURCES "${path_src_arch_detect}"
-    COPY_FILE "${path_bin_arch_detect}"
-  )
-  cmake_pop_check_state()
-  if(NOT SDL_CPU_CHECK_ALL)
-    message(STATUS "${msg} - <ERROR>")
-    message(WARNING "Failed to compile source detecting the target CPU architecture")
-  else()
-    set(re "INFO<([A-Z0-9]+)=([01])>")
-    file(STRINGS "${path_bin_arch_detect}" infos REGEX "${re}")
-
-    foreach(info_arch_01 IN LISTS infos)
-      string(REGEX MATCH "${re}" A "${info_arch_01}")
-      if(NOT "${CMAKE_MATCH_1}" IN_LIST known_archs)
-        message(WARNING "Unknown architecture: \"${CMAKE_MATCH_1}\"")
-        continue()
-      endif()
-      set(arch "${CMAKE_MATCH_1}")
-      set(arch_01 "${CMAKE_MATCH_2}")
-      set(detected_${arch} "${arch_01}")
-    endforeach()
-
-    foreach(known_arch IN LISTS known_archs)
-      if(detected_${known_arch})
-        list(APPEND detected_archs ${known_arch})
-      endif()
-    endforeach()
-  endif()
-
-  if(detected_archs)
-    foreach(known_arch IN LISTS known_archs)
-      set("SDL_CPU_${known_arch}" "${detected_${known_arch}}" CACHE BOOL "Detected architecture ${known_arch}")
-    endforeach()
-    message(STATUS "${msg} - ${detected_archs}")
-  else()
-    include(CheckCSourceCompiles)
-    cmake_push_check_state(RESET)
-    foreach(known_arch IN LISTS known_archs)
-      if(NOT detected_archs)
-        set(cache_variable "SDL_CPU_${known_arch}")
-          set(test_src "
-        int main(int argc, char *argv[]) {
-        #if ${arch_check_${known_arch}}
-          return 0;
-        #else
-          choke
-        #endif
-        }
-        ")
-        check_c_source_compiles("${test_src}" "${cache_variable}")
-        if(${cache_variable})
-          set(SDL_CPU_${known_arch} "1" CACHE BOOL "Detected architecture ${known_arch}")
-          set(detected_archs ${known_arch})
-        else()
-          set(SDL_CPU_${known_arch} "0" CACHE BOOL "Detected architecture ${known_arch}")
-        endif()
-      endif()
-    endforeach()
-    cmake_pop_check_state()
-  endif()
-  set("${DETECTED_ARCHS}" "${detected_archs}" PARENT_SCOPE)
-endfunction()
+function(SDL_DetectTargetCPUArchitectures DETECTED_ARCHS)
+
+  set(known_archs EMSCRIPTEN ARM32 ARM64 ARM64EC LOONGARCH64 POWERPC32 POWERPC64 X86 X64)
+
+  if(APPLE AND CMAKE_OSX_ARCHITECTURES)
+    foreach(known_arch IN LISTS known_archs)
+      set(SDL_CPU_${known_arch} "0")
+    endforeach()
+    set(detected_archs)
+    foreach(osx_arch IN LISTS CMAKE_OSX_ARCHITECTURES)
+      if(osx_arch STREQUAL "x86_64")
+        set(SDL_CPU_X64 "1")
+        list(APPEND detected_archs "X64")
+      elseif(osx_arch STREQUAL "arm64")
+        set(SDL_CPU_ARM64 "1")
+        list(APPEND detected_archs "ARM64")
+      endif()
+    endforeach()
+    set("${DETECTED_ARCHS}" "${detected_archs}" PARENT_SCOPE)
+    return()
+  endif()
+
+  set(detected_archs)
+  foreach(known_arch IN LISTS known_archs)
+    if(SDL_CPU_${known_arch})
+      list(APPEND detected_archs "${known_arch}")
+    endif()
+  endforeach()
+
+  if(detected_archs)
+    set("${DETECTED_ARCHS}" "${detected_archs}" PARENT_SCOPE)
+    return()
+  endif()
+
+  set(arch_check_ARM32 "defined(__arm__) || defined(_M_ARM)")
+  set(arch_check_ARM64 "defined(__aarch64__) || defined(_M_ARM64)")
+  set(arch_check_ARM64EC "defined(_M_ARM64EC)")
+  set(arch_check_EMSCRIPTEN "defined(__EMSCRIPTEN__)")
+  set(arch_check_LOONGARCH64 "defined(__loongarch64)")
+  set(arch_check_POWERPC32 "(defined(__PPC__) || defined(__powerpc__)) && !defined(__powerpc64__)")
+  set(arch_check_POWERPC64 "defined(__PPC64__) || defined(__powerpc64__)")
+  set(arch_check_X86 "defined(__i386__) || defined(__i486__) || defined(__i586__) || defined(__i686__) ||defined( __i386) || defined(_M_IX86)")
+  set(arch_check_X64 "(defined(__amd64__) || defined(__amd64) || defined(__x86_64__) || defined(__x86_64) || defined(_M_X64) || defined(_M_AMD64)) && !defined(_M_ARM64EC)")
+
+  set(src_vars "")
+  set(src_main "")
+  foreach(known_arch IN LISTS known_archs)
+    set(detected_${known_arch} "0")
+
+    string(APPEND src_vars "
+#if ${arch_check_${known_arch}}
+#define ARCH_${known_arch} \"1\"
+#else
+#define ARCH_${known_arch} \"0\"
+#endif
+const char *arch_${known_arch} = \"INFO<${known_arch}=\" ARCH_${known_arch} \">\";
+")
+    string(APPEND src_main "
+  result += arch_${known_arch}[argc];")
+  endforeach()
+
+  set(src_arch_detect "${src_vars}
+int main(int argc, char *argv[]) {
+  (void)argv;
+  int result = 0;
+${src_main}
+  return result;
+}")
+
+  if(CMAKE_C_COMPILER)
+    set(ext ".c")
+  elseif(CMAKE_CXX_COMPILER)
+    set(ext ".cpp")
+  else()
+    enable_language(C)
+    set(ext ".c")
+  endif()
+  set(path_src_arch_detect "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/CMakeTmp/SDL_detect_arch${ext}")
+  file(WRITE "${path_src_arch_detect}" "${src_arch_detect}")
+  set(path_dir_arch_detect "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/CMakeTmp/SDL_detect_arch")
+  set(path_bin_arch_detect "${path_dir_arch_detect}/bin")
+
+  set(detected_archs)
+
+  set(msg "Detecting Target CPU Architecture")
+  message(STATUS "${msg}")
+
+  include(CMakePushCheckState)
+
+  set(CMAKE_TRY_COMPILE_TARGET_TYPE "STATIC_LIBRARY")
+
+  cmake_push_check_state(RESET)
+  try_compile(SDL_CPU_CHECK_ALL
+    "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/CMakeTmp/SDL_detect_arch"
+    SOURCES "${path_src_arch_detect}"
+    COPY_FILE "${path_bin_arch_detect}"
+  )
+  cmake_pop_check_state()
+  if(NOT SDL_CPU_CHECK_ALL)
+    message(STATUS "${msg} - <ERROR>")
+    message(WARNING "Failed to compile source detecting the target CPU architecture")
+  else()
+    set(re "INFO<([A-Z0-9]+)=([01])>")
+    file(STRINGS "${path_bin_arch_detect}" infos REGEX "${re}")
+
+    foreach(info_arch_01 IN LISTS infos)
+      string(REGEX MATCH "${re}" A "${info_arch_01}")
+      if(NOT "${CMAKE_MATCH_1}" IN_LIST known_archs)
+        message(WARNING "Unknown architecture: \"${CMAKE_MATCH_1}\"")
+        continue()
+      endif()
+      set(arch "${CMAKE_MATCH_1}")
+      set(arch_01 "${CMAKE_MATCH_2}")
+      set(detected_${arch} "${arch_01}")
+    endforeach()
+
+    foreach(known_arch IN LISTS known_archs)
+      if(detected_${known_arch})
+        list(APPEND detected_archs ${known_arch})
+      endif()
+    endforeach()
+  endif()
+
+  if(detected_archs)
+    foreach(known_arch IN LISTS known_archs)
+      set("SDL_CPU_${known_arch}" "${detected_${known_arch}}" CACHE BOOL "Detected architecture ${known_arch}")
+    endforeach()
+    message(STATUS "${msg} - ${detected_archs}")
+  else()
+    include(CheckCSourceCompiles)
+    cmake_push_check_state(RESET)
+    foreach(known_arch IN LISTS known_archs)
+      if(NOT detected_archs)
+        set(cache_variable "SDL_CPU_${known_arch}")
+          set(test_src "
+        int main(int argc, char *argv[]) {
+        #if ${arch_check_${known_arch}}
+          return 0;
+        #else
+          choke
+        #endif
+        }
+        ")
+        check_c_source_compiles("${test_src}" "${cache_variable}")
+        if(${cache_variable})
+          set(SDL_CPU_${known_arch} "1" CACHE BOOL "Detected architecture ${known_arch}")
+          set(detected_archs ${known_arch})
+        else()
+          set(SDL_CPU_${known_arch} "0" CACHE BOOL "Detected architecture ${known_arch}")
+        endif()
+      endif()
+    endforeach()
+    cmake_pop_check_state()
+  endif()
+  set("${DETECTED_ARCHS}" "${detected_archs}" PARENT_SCOPE)
+endfunction()
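
As a usage illustration (not part of the diff above): a project that vendors `cmake/sdlcpu.cmake` might call the rewritten function roughly as in the following sketch. The output variable name `DETECTED_CPU_ARCHS` and the compile definition are placeholders; the function name, its single output argument, and the cached `SDL_CPU_<ARCH>` booleans are taken from the code shown in the diff.

```
# Hypothetical usage sketch of the detection helper shown in the diff above.
include("${CMAKE_CURRENT_SOURCE_DIR}/cmake/sdlcpu.cmake")

# Reports the detected architectures in the variable named by the argument,
# and caches SDL_CPU_<ARCH> booleans (e.g. SDL_CPU_X64, SDL_CPU_ARM64).
SDL_DetectTargetCPUArchitectures(DETECTED_CPU_ARCHS)
message(STATUS "Target CPU architecture(s): ${DETECTED_CPU_ARCHS}")

if(SDL_CPU_ARM64 OR SDL_CPU_X64)
  # Placeholder: enable a 64-bit-only code path in your own project.
  add_compile_definitions(MYPROJECT_HAS_64BIT_TARGET=1)
endif()
```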

+ 15 - 13
docs/README-android.md

@@ -126,13 +126,10 @@ Here's an explanation of the files in the Android project, so you can customize
 Using the SDL3 Android Archive (.aar)
 Using the SDL3 Android Archive (.aar)
 ================================================================================
 ================================================================================
 
 
-The `create-android-project.py` script can
-./create-android-project.py com.yourcompany.yourapp < sources.list
-
-The Android archive allows use of SDL3 in your Android project, without needing to copy any SDL c or java source.
+The Android archive allows you to use SDL3 in your Android project without copying any SDL C or Java sources into it.
 For integration with CMake/ndk-build, it uses [prefab](https://google.github.io/prefab/).
 For integration with CMake/ndk-build, it uses [prefab](https://google.github.io/prefab/).
 
 
-Copy the archive to a `app/libs` directory of your project and add the following to `app/gradle.build`:
+Copy the archive to an `app/libs` directory in your project and add the following to `app/build.gradle`:
 ```
 ```
 android {
 android {
     /* ... */
     /* ... */
@@ -141,29 +138,34 @@ android {
     }
     }
 }
 }
 dependencies {
 dependencies {
-    implementation files('libs/@PROJECT_NAME@-@PROJECT_VERSION@.aar')
+    implementation files('libs/SDL3-X.Y.Z.aar') /* Replace with the filename of the actual SDL3-X.Y.Z.aar file you downloaded */
     /* ... */
     /* ... */
 }
 }
 ```
 ```
 
 
-If you're using CMake, add the following to your CMakeLists.txt:
+If you use CMake, add the following to your CMakeLists.txt:
 ```
 ```
-find_package(@PROJECT_NAME@ REQUIRED CONFIG)
-target_link_libraries(yourgame PRIVATE @PROJECT_NAME@::@PROJECT_NAME@)
+find_package(SDL3 REQUIRED CONFIG)
+target_link_libraries(yourgame PRIVATE SDL3::SDL3)
 ```
 ```
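
For orientation, a complete minimal `CMakeLists.txt` for the native part of an Android app module might look roughly like the sketch below. Only `find_package(SDL3 REQUIRED CONFIG)` and the `SDL3::SDL3` target come from the instructions above; the project name, source file, and library name `main` are placeholders.

```
# Hypothetical minimal CMakeLists.txt for an Android app's native code.
cmake_minimum_required(VERSION 3.16)
project(yourgame LANGUAGES C)

# Provided by the SDL3 .aar via prefab (see above).
find_package(SDL3 REQUIRED CONFIG)

# Android loads native code as a shared library; "main" is a placeholder name.
add_library(main SHARED src/yourgame.c)
target_link_libraries(main PRIVATE SDL3::SDL3)
```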
 
 
-If you're using ndk-build, add the following somewhere after `LOCAL_MODULE := yourgame` to your `Android.mk` or `Application.mk`:
+If you use ndk-build, add the following before `include $(BUILD_SHARED_LIBRARY)` to your `Android.mk`:
+```
+LOCAL_SHARED_LIBRARIES := SDL3 SDL3-Headers
+```
+And add the following at the bottom:
 ```
 ```
 # https://google.github.io/prefab/build-systems.html
 # https://google.github.io/prefab/build-systems.html
-
 # Add the prefab modules to the import path.
 # Add the prefab modules to the import path.
 $(call import-add-path,/out)
 $(call import-add-path,/out)
-
 # Import @PROJECT_NAME@ so we can depend on it.
 # Import @PROJECT_NAME@ so we can depend on it.
 $(call import-module,prefab/@PROJECT_NAME@)
 $(call import-module,prefab/@PROJECT_NAME@)
 ```
 ```
 
 
-If you want to avoid adding the complete SDL source base as a subproject, or adding the Java sources of the bindings to your Android project
+The `build-scripts/create-android-project.py` script can create a project that uses the SDL3 Android archive (.aar) from scratch:
+```
+build-scripts/create-android-project.py --variant aar com.yourcompany.yourapp < sources.list
+```
 
 
 Customizing your application name
 Customizing your application name
 ================================================================================
 ================================================================================

+ 8 - 8
include/build_config/SDL_revision.h.cmake

@@ -11,19 +11,19 @@
   freely, subject to the following restrictions:
   freely, subject to the following restrictions:
 
 
   1. The origin of this software must not be misrepresented; you must not
   1. The origin of this software must not be misrepresented; you must not
-  claim that you wrote the original software. If you use this software
-  in a product, an acknowledgment in the product documentation would be
-  appreciated but is not required.
+     claim that you wrote the original software. If you use this software
+     in a product, an acknowledgment in the product documentation would be
+     appreciated but is not required.
   2. Altered source versions must be plainly marked as such, and must not be
   2. Altered source versions must be plainly marked as such, and must not be
-  misrepresented as being the original software.
+     misrepresented as being the original software.
   3. This notice may not be removed or altered from any source distribution.
   3. This notice may not be removed or altered from any source distribution.
 */
 */
 
 
 /**
 /**
-*  \file SDL_revision.h
-*
-*  Header file containing the SDL revision.
-*/
+ *  \file SDL_revision.h
+ *
+ *  Header file containing the SDL revision.
+ */
 
 
 #ifndef SDL_revision_h_
 #ifndef SDL_revision_h_
 #define SDL_revision_h_
 #define SDL_revision_h_