@@ -1195,7 +1195,7 @@ struct VulkanRenderer
 
 // Forward declarations
 
-static bool VULKAN_INTERNAL_DefragmentMemory(VulkanRenderer *renderer);
+static bool VULKAN_INTERNAL_DefragmentMemory(VulkanRenderer *renderer, VulkanCommandBuffer *commandBuffer);
 static bool VULKAN_INTERNAL_BeginCommandBuffer(VulkanRenderer *renderer, VulkanCommandBuffer *commandBuffer);
 static void VULKAN_ReleaseWindow(SDL_GPURenderer *driverData, SDL_Window *window);
 static bool VULKAN_Wait(SDL_GPURenderer *driverData);
@@ -5578,6 +5578,7 @@ static void VULKAN_PopDebugGroup(
 
 static VulkanTexture *VULKAN_INTERNAL_CreateTexture(
     VulkanRenderer *renderer,
+    bool transitionToDefaultLayout,
     const SDL_GPUTextureCreateInfo *createinfo)
 {
     VkResult vulkanResult;
@@ -5805,15 +5806,17 @@ static VulkanTexture *VULKAN_INTERNAL_CreateTexture(
             &nameInfo);
     }
 
-    // Let's transition to the default barrier state, because for some reason Vulkan doesn't let us do that with initialLayout.
-    VulkanCommandBuffer *barrierCommandBuffer = (VulkanCommandBuffer *)VULKAN_AcquireCommandBuffer((SDL_GPURenderer *)renderer);
-    VULKAN_INTERNAL_TextureTransitionToDefaultUsage(
-        renderer,
-        barrierCommandBuffer,
-        VULKAN_TEXTURE_USAGE_MODE_UNINITIALIZED,
-        texture);
-    VULKAN_INTERNAL_TrackTexture(barrierCommandBuffer, texture);
-    VULKAN_Submit((SDL_GPUCommandBuffer *)barrierCommandBuffer);
+    if (transitionToDefaultLayout) {
+        // Let's transition to the default barrier state, because for some reason Vulkan doesn't let us do that with initialLayout.
+        VulkanCommandBuffer *barrierCommandBuffer = (VulkanCommandBuffer *)VULKAN_AcquireCommandBuffer((SDL_GPURenderer *)renderer);
+        VULKAN_INTERNAL_TextureTransitionToDefaultUsage(
+            renderer,
+            barrierCommandBuffer,
+            VULKAN_TEXTURE_USAGE_MODE_UNINITIALIZED,
+            texture);
+        VULKAN_INTERNAL_TrackTexture(barrierCommandBuffer, texture);
+        VULKAN_Submit((SDL_GPUCommandBuffer *)barrierCommandBuffer);
+    }
 
     return texture;
 }
@@ -5863,6 +5866,7 @@ static void VULKAN_INTERNAL_CycleActiveBuffer(
 
 static void VULKAN_INTERNAL_CycleActiveTexture(
     VulkanRenderer *renderer,
+    VulkanCommandBuffer *commandBuffer,
     VulkanTextureContainer *container)
 {
     VulkanTexture *texture;
@@ -5880,8 +5884,15 @@ static void VULKAN_INTERNAL_CycleActiveTexture(
     // No texture is available, generate a new one.
     texture = VULKAN_INTERNAL_CreateTexture(
         renderer,
+        false,
         &container->header.info);
 
+    VULKAN_INTERNAL_TextureTransitionToDefaultUsage(
+        renderer,
+        commandBuffer,
+        VULKAN_TEXTURE_USAGE_MODE_UNINITIALIZED,
+        texture);
+
     if (!texture) {
         return;
     }
@@ -5945,6 +5956,7 @@ static VulkanTextureSubresource *VULKAN_INTERNAL_PrepareTextureSubresourceForWri
         SDL_GetAtomicInt(&textureContainer->activeTexture->referenceCount) > 0) {
         VULKAN_INTERNAL_CycleActiveTexture(
             renderer,
+            commandBuffer,
             textureContainer);
 
         textureSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
@@ -6726,6 +6738,7 @@ static SDL_GPUTexture *VULKAN_CreateTexture(
 
     texture = VULKAN_INTERNAL_CreateTexture(
         renderer,
+        true,
         createinfo);
 
     if (texture == NULL) {
@@ -6900,7 +6913,7 @@ static void VULKAN_INTERNAL_ReleaseBuffer(
     renderer->buffersToDestroy[renderer->buffersToDestroyCount] = vulkanBuffer;
     renderer->buffersToDestroyCount += 1;
 
-    vulkanBuffer->markedForDestroy = 1;
+    vulkanBuffer->markedForDestroy = true;
     vulkanBuffer->container = NULL;
 
     SDL_UnlockMutex(renderer->disposeLock);
@@ -10417,7 +10430,7 @@ static bool VULKAN_Submit(
     Uint32 swapchainImageIndex;
     VulkanTextureSubresource *swapchainTextureSubresource;
     VulkanMemorySubAllocator *allocator;
-    bool presenting = false;
+    bool presenting = (vulkanCommandBuffer->presentDataCount > 0);
 
     SDL_LockMutex(renderer->submitLock);
 
@@ -10440,6 +10453,15 @@ static bool VULKAN_Submit(
             swapchainTextureSubresource);
     }
 
+    if (presenting &&
+        renderer->allocationsToDefragCount > 0 &&
+        !renderer->defragInProgress) {
+        if (!VULKAN_INTERNAL_DefragmentMemory(renderer, vulkanCommandBuffer))
+        {
+            SDL_LogError(SDL_LOG_CATEGORY_GPU, "%s", "Failed to defragment memory, likely OOM!");
+        }
+    }
+
     if (!VULKAN_INTERNAL_EndCommandBuffer(renderer, vulkanCommandBuffer)) {
         SDL_UnlockMutex(renderer->submitLock);
         return false;
@@ -10477,11 +10499,7 @@ static bool VULKAN_Submit(
     }
 
     // Present, if applicable
-    bool result = true;
-
     for (Uint32 j = 0; j < vulkanCommandBuffer->presentDataCount; j += 1) {
-        presenting = true;
-
         presentData = &vulkanCommandBuffer->presentDatas[j];
 
         presentInfo.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
@@ -10519,60 +10537,51 @@ static bool VULKAN_Submit(
             (presentData->windowData->frameCounter + 1) % renderer->allowedFramesInFlight;
     }
 
-    // Check if we can perform any cleanups
-
-    for (Sint32 i = renderer->submittedCommandBufferCount - 1; i >= 0; i -= 1) {
-        vulkanResult = renderer->vkGetFenceStatus(
-            renderer->logicalDevice,
-            renderer->submittedCommandBuffers[i]->inFlightFence->fence);
+    // If presenting, check if we can perform any cleanups
+    if (presenting) {
+        for (Sint32 i = renderer->submittedCommandBufferCount - 1; i >= 0; i -= 1) {
+            vulkanResult = renderer->vkGetFenceStatus(
+                renderer->logicalDevice,
+                renderer->submittedCommandBuffers[i]->inFlightFence->fence);
 
-        if (vulkanResult == VK_SUCCESS) {
-            VULKAN_INTERNAL_CleanCommandBuffer(
-                renderer,
-                renderer->submittedCommandBuffers[i],
-                false);
+            if (vulkanResult == VK_SUCCESS) {
+                VULKAN_INTERNAL_CleanCommandBuffer(
+                    renderer,
+                    renderer->submittedCommandBuffers[i],
+                    false);
+            }
         }
-    }
 
-    if (renderer->checkEmptyAllocations) {
-        SDL_LockMutex(renderer->allocatorLock);
+        if (renderer->checkEmptyAllocations) {
+            SDL_LockMutex(renderer->allocatorLock);
 
-        for (Uint32 i = 0; i < VK_MAX_MEMORY_TYPES; i += 1) {
-            allocator = &renderer->memoryAllocator->subAllocators[i];
+            for (Uint32 i = 0; i < VK_MAX_MEMORY_TYPES; i += 1) {
+                allocator = &renderer->memoryAllocator->subAllocators[i];
 
-            for (Sint32 j = allocator->allocationCount - 1; j >= 0; j -= 1) {
-                if (allocator->allocations[j]->usedRegionCount == 0) {
-                    VULKAN_INTERNAL_DeallocateMemory(
-                        renderer,
-                        allocator,
-                        j);
+                for (Sint32 j = allocator->allocationCount - 1; j >= 0; j -= 1) {
+                    if (allocator->allocations[j]->usedRegionCount == 0) {
+                        VULKAN_INTERNAL_DeallocateMemory(
+                            renderer,
+                            allocator,
+                            j);
+                    }
                 }
             }
-        }
 
-        renderer->checkEmptyAllocations = false;
+            renderer->checkEmptyAllocations = false;
 
-        SDL_UnlockMutex(renderer->allocatorLock);
-    }
-
-    // Check pending destroys
-    VULKAN_INTERNAL_PerformPendingDestroys(renderer);
+            SDL_UnlockMutex(renderer->allocatorLock);
+        }
 
-    // Defrag!
-    if (
-        presenting &&
-        renderer->allocationsToDefragCount > 0 &&
-        !renderer->defragInProgress) {
-        result = VULKAN_INTERNAL_DefragmentMemory(renderer);
+        VULKAN_INTERNAL_PerformPendingDestroys(renderer);
     }
 
     // Mark command buffer as submitted
-    // This must happen after defrag, because it will try to acquire new command buffers.
     VULKAN_INTERNAL_ReleaseCommandBuffer(vulkanCommandBuffer);
 
     SDL_UnlockMutex(renderer->submitLock);
 
-    return result;
+    return true;
 }
 
 static bool VULKAN_Cancel(
@@ -10599,43 +10608,28 @@ static bool VULKAN_Cancel(
 }
 
 static bool VULKAN_INTERNAL_DefragmentMemory(
-    VulkanRenderer *renderer)
+    VulkanRenderer *renderer,
+    VulkanCommandBuffer *commandBuffer)
 {
-    VulkanMemoryAllocation *allocation;
-    VulkanMemoryUsedRegion *currentRegion;
-    VulkanBuffer *newBuffer;
-    VulkanTexture *newTexture;
-    VkBufferCopy bufferCopy;
-    VkImageCopy imageCopy;
-    VulkanCommandBuffer *commandBuffer;
-    VulkanTextureSubresource *srcSubresource;
-    VulkanTextureSubresource *dstSubresource;
-    Uint32 i, subresourceIndex;
-
     renderer->defragInProgress = 1;
-
-    commandBuffer = (VulkanCommandBuffer *)VULKAN_AcquireCommandBuffer((SDL_GPURenderer *)renderer);
-    if (commandBuffer == NULL) {
-        return false;
-    }
     commandBuffer->isDefrag = 1;
 
     SDL_LockMutex(renderer->allocatorLock);
 
-    allocation = renderer->allocationsToDefrag[renderer->allocationsToDefragCount - 1];
+    VulkanMemoryAllocation *allocation = renderer->allocationsToDefrag[renderer->allocationsToDefragCount - 1];
     renderer->allocationsToDefragCount -= 1;
 
     /* For each used region in the allocation
      * create a new resource, copy the data
     * and re-point the resource containers
     */
-    for (i = 0; i < allocation->usedRegionCount; i += 1) {
-        currentRegion = allocation->usedRegions[i];
+    for (Uint32 i = 0; i < allocation->usedRegionCount; i += 1) {
+        VulkanMemoryUsedRegion *currentRegion = allocation->usedRegions[i];
 
         if (currentRegion->isBuffer && !currentRegion->vulkanBuffer->markedForDestroy) {
             currentRegion->vulkanBuffer->usage |= VK_BUFFER_USAGE_TRANSFER_DST_BIT;
 
-            newBuffer = VULKAN_INTERNAL_CreateBuffer(
+            VulkanBuffer *newBuffer = VULKAN_INTERNAL_CreateBuffer(
                 renderer,
                 currentRegion->vulkanBuffer->size,
                 currentRegion->vulkanBuffer->usage,
@@ -10645,6 +10639,7 @@ static bool VULKAN_INTERNAL_DefragmentMemory(
 
             if (newBuffer == NULL) {
                 SDL_UnlockMutex(renderer->allocatorLock);
+                SDL_LogError(SDL_LOG_CATEGORY_GPU, "%s", "Failed to allocate defrag buffer!");
                 return false;
             }
 
@@ -10663,6 +10658,7 @@ static bool VULKAN_INTERNAL_DefragmentMemory(
                 VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION,
                 newBuffer);
 
+            VkBufferCopy bufferCopy;
            bufferCopy.srcOffset = 0;
            bufferCopy.dstOffset = 0;
            bufferCopy.size = currentRegion->resourceSize;
@@ -10702,20 +10698,22 @@ static bool VULKAN_INTERNAL_DefragmentMemory(
 
             VULKAN_INTERNAL_ReleaseBuffer(renderer, currentRegion->vulkanBuffer);
         } else if (!currentRegion->isBuffer && !currentRegion->vulkanTexture->markedForDestroy) {
-            newTexture = VULKAN_INTERNAL_CreateTexture(
+            VulkanTexture *newTexture = VULKAN_INTERNAL_CreateTexture(
                 renderer,
+                false,
                 &currentRegion->vulkanTexture->container->header.info);
 
             if (newTexture == NULL) {
                 SDL_UnlockMutex(renderer->allocatorLock);
+                SDL_LogError(SDL_LOG_CATEGORY_GPU, "%s", "Failed to allocate defrag buffer!");
                 return false;
             }
 
             SDL_GPUTextureCreateInfo info = currentRegion->vulkanTexture->container->header.info;
-            for (subresourceIndex = 0; subresourceIndex < currentRegion->vulkanTexture->subresourceCount; subresourceIndex += 1) {
+            for (Uint32 subresourceIndex = 0; subresourceIndex < currentRegion->vulkanTexture->subresourceCount; subresourceIndex += 1) {
                 // copy subresource if necessary
-                srcSubresource = &currentRegion->vulkanTexture->subresources[subresourceIndex];
-                dstSubresource = &newTexture->subresources[subresourceIndex];
+                VulkanTextureSubresource *srcSubresource = &currentRegion->vulkanTexture->subresources[subresourceIndex];
+                VulkanTextureSubresource *dstSubresource = &newTexture->subresources[subresourceIndex];
 
                 VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
                     renderer,
@@ -10723,12 +10721,14 @@ static bool VULKAN_INTERNAL_DefragmentMemory(
                     VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
                     srcSubresource);
 
-                VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
+                VULKAN_INTERNAL_TextureSubresourceMemoryBarrier(
                     renderer,
                     commandBuffer,
+                    VULKAN_TEXTURE_USAGE_MODE_UNINITIALIZED,
                     VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
                     dstSubresource);
 
+                VkImageCopy imageCopy;
                 imageCopy.srcOffset.x = 0;
                 imageCopy.srcOffset.y = 0;
                 imageCopy.srcOffset.z = 0;
@@ -10780,8 +10780,7 @@ static bool VULKAN_INTERNAL_DefragmentMemory(
 
     SDL_UnlockMutex(renderer->allocatorLock);
 
-    return VULKAN_Submit(
-        (SDL_GPUCommandBuffer *)commandBuffer);
+    return true;
 }
 
 // Format Info