From 4c8946c0801972d45079e7e87136d846a4a7e85e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pedro=20J=2E=20Est=C3=A9banez?=
Date: Fri, 21 Apr 2023 11:55:53 +0200
Subject: Fix issues with Vulkan layout transitions

(cherry picked from commit 882b8692204e8db465670dbb3150a848197ad576)
---
 drivers/vulkan/rendering_device_vulkan.cpp | 102 +++++++++++++++--------------
 drivers/vulkan/rendering_device_vulkan.h   |   2 +
 2 files changed, 54 insertions(+), 50 deletions(-)

diff --git a/drivers/vulkan/rendering_device_vulkan.cpp b/drivers/vulkan/rendering_device_vulkan.cpp
index 2166d12d31..2f01376a45 100644
--- a/drivers/vulkan/rendering_device_vulkan.cpp
+++ b/drivers/vulkan/rendering_device_vulkan.cpp
@@ -8115,35 +8115,14 @@ void RenderingDeviceVulkan::compute_list_dispatch_indirect(ComputeListID p_list,
 }
 
 void RenderingDeviceVulkan::compute_list_add_barrier(ComputeListID p_list) {
-#ifdef FORCE_FULL_BARRIER
-	_full_barrier(true);
-#else
-	_memory_barrier(VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, true);
-#endif
+	uint32_t barrier_flags = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
+	uint32_t access_flags = VK_ACCESS_SHADER_READ_BIT;
+	_compute_list_add_barrier(BARRIER_MASK_COMPUTE, barrier_flags, access_flags);
 }
 
-void RenderingDeviceVulkan::compute_list_end(BitField<BarrierMask> p_post_barrier) {
+void RenderingDeviceVulkan::_compute_list_add_barrier(BitField<BarrierMask> p_post_barrier, uint32_t p_barrier_flags, uint32_t p_access_flags) {
 	ERR_FAIL_COND(!compute_list);
 
-	uint32_t barrier_flags = 0;
-	uint32_t access_flags = 0;
-	if (p_post_barrier.has_flag(BARRIER_MASK_COMPUTE)) {
-		barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
-		access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
-	}
-	if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) {
-		barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
-		access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
-	}
-	if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) {
-		barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
-		access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT;
-	}
-
-	if (barrier_flags == 0) {
-		barrier_flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
-	}
-
 	VkImageMemoryBarrier *image_barriers = nullptr;
 
 	uint32_t image_barrier_count = compute_list->state.textures_to_sampled_layout.size();
@@ -8152,27 +8131,29 @@ void RenderingDeviceVulkan::compute_list_end(BitField<BarrierMask> p_post_barrie
 		image_barriers = (VkImageMemoryBarrier *)alloca(sizeof(VkImageMemoryBarrier) * image_barrier_count);
 	}
 
-	uint32_t barrier_idx = 0;
+	image_barrier_count = 0; // We'll count how many we end up issuing.
 
 	for (Texture *E : compute_list->state.textures_to_sampled_layout) {
-		VkImageMemoryBarrier &image_memory_barrier = image_barriers[barrier_idx++];
-		image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
-		image_memory_barrier.pNext = nullptr;
-		image_memory_barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
-		image_memory_barrier.dstAccessMask = access_flags;
-		image_memory_barrier.oldLayout = E->layout;
-		image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+		if (E->layout != VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) {
+			VkImageMemoryBarrier &image_memory_barrier = image_barriers[image_barrier_count++];
+			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+			image_memory_barrier.pNext = nullptr;
+			image_memory_barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
+			image_memory_barrier.dstAccessMask = p_access_flags;
+			image_memory_barrier.oldLayout = E->layout;
+			image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
 
-		image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
-		image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
-		image_memory_barrier.image = E->image;
-		image_memory_barrier.subresourceRange.aspectMask = E->read_aspect_mask;
-		image_memory_barrier.subresourceRange.baseMipLevel = E->base_mipmap;
-		image_memory_barrier.subresourceRange.levelCount = E->mipmaps;
-		image_memory_barrier.subresourceRange.baseArrayLayer = E->base_layer;
-		image_memory_barrier.subresourceRange.layerCount = E->layers;
+			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+			image_memory_barrier.image = E->image;
+			image_memory_barrier.subresourceRange.aspectMask = E->read_aspect_mask;
+			image_memory_barrier.subresourceRange.baseMipLevel = E->base_mipmap;
+			image_memory_barrier.subresourceRange.levelCount = E->mipmaps;
+			image_memory_barrier.subresourceRange.baseArrayLayer = E->base_layer;
+			image_memory_barrier.subresourceRange.layerCount = E->layers;
 
-		E->layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+			E->layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+		}
 
 		if (E->used_in_frame != frames_drawn) {
 			E->used_in_transfer = false;
@@ -8182,19 +8163,40 @@ void RenderingDeviceVulkan::compute_list_end(BitField<BarrierMask> p_post_barrie
 		}
 	}
 
-	VkMemoryBarrier mem_barrier;
-	mem_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
-	mem_barrier.pNext = nullptr;
-	mem_barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
-	mem_barrier.dstAccessMask = access_flags;
-
-	if (image_barrier_count > 0 || p_post_barrier != BARRIER_MASK_NO_BARRIER) {
-		vkCmdPipelineBarrier(compute_list->command_buffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, barrier_flags, 0, 1, &mem_barrier, 0, nullptr, image_barrier_count, image_barriers);
+	if (p_barrier_flags) {
+		VkMemoryBarrier mem_barrier;
+		mem_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
+		mem_barrier.pNext = nullptr;
+		mem_barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
+		mem_barrier.dstAccessMask = p_access_flags;
+		vkCmdPipelineBarrier(compute_list->command_buffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, p_barrier_flags, 0, 1, &mem_barrier, 0, nullptr, image_barrier_count, image_barriers);
+	} else if (image_barrier_count) {
+		vkCmdPipelineBarrier(compute_list->command_buffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, nullptr, 0, nullptr, image_barrier_count, image_barriers);
 	}
 
 #ifdef FORCE_FULL_BARRIER
 	_full_barrier(true);
 #endif
+}
+
+void RenderingDeviceVulkan::compute_list_end(BitField<BarrierMask> p_post_barrier) {
+	ERR_FAIL_COND(!compute_list);
+
+	uint32_t barrier_flags = 0;
+	uint32_t access_flags = 0;
+	if (p_post_barrier.has_flag(BARRIER_MASK_COMPUTE)) {
+		barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
+		access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
+	}
+	if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) {
+		barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
+		access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
+	}
+	if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) {
+		barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
+		access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT;
+	}
+	_compute_list_add_barrier(p_post_barrier, barrier_flags, access_flags);
 
 	memdelete(compute_list);
 	compute_list = nullptr;
diff --git a/drivers/vulkan/rendering_device_vulkan.h b/drivers/vulkan/rendering_device_vulkan.h
index 91a09fa970..3f01895745 100644
--- a/drivers/vulkan/rendering_device_vulkan.h
+++ b/drivers/vulkan/rendering_device_vulkan.h
@@ -954,6 +954,8 @@ class RenderingDeviceVulkan : public RenderingDevice {
 
 	ComputeList *compute_list = nullptr;
 
+	void _compute_list_add_barrier(BitField<BarrierMask> p_post_barrier, uint32_t p_barrier_flags, uint32_t p_access_flags);
+
 	/**************************/
 	/**** FRAME MANAGEMENT ****/
 	/**************************/
-- 
cgit v1.2.3