Diffstat (limited to 'drivers/vulkan')
-rw-r--r-- | drivers/vulkan/rendering_device_vulkan.cpp | 270
-rw-r--r-- | drivers/vulkan/rendering_device_vulkan.h | 28
-rw-r--r-- | drivers/vulkan/vulkan_context.cpp | 760
-rw-r--r-- | drivers/vulkan/vulkan_context.h | 36
4 files changed, 661 insertions(+), 433 deletions(-)
diff --git a/drivers/vulkan/rendering_device_vulkan.cpp b/drivers/vulkan/rendering_device_vulkan.cpp index f0f70b62e0..7f5bac30f1 100644 --- a/drivers/vulkan/rendering_device_vulkan.cpp +++ b/drivers/vulkan/rendering_device_vulkan.cpp @@ -47,7 +47,7 @@ static const uint32_t SMALL_ALLOCATION_MAX_SIZE = 4096; // Get the Vulkan object information and possible stage access types (bitwise OR'd with incoming values). -RenderingDeviceVulkan::Buffer *RenderingDeviceVulkan::_get_buffer_from_owner(RID p_buffer, VkPipelineStageFlags &r_stage_mask, VkAccessFlags &r_access_mask, uint32_t p_post_barrier) { +RenderingDeviceVulkan::Buffer *RenderingDeviceVulkan::_get_buffer_from_owner(RID p_buffer, VkPipelineStageFlags &r_stage_mask, VkAccessFlags &r_access_mask, BitField<BarrierMask> p_post_barrier) { Buffer *buffer = nullptr; if (vertex_buffer_owner.owns(p_buffer)) { buffer = vertex_buffer_owner.get_or_null(p_buffer); @@ -55,11 +55,11 @@ RenderingDeviceVulkan::Buffer *RenderingDeviceVulkan::_get_buffer_from_owner(RID r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT; r_access_mask |= VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT; if (buffer->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) { - if (p_post_barrier & BARRIER_MASK_RASTER) { + if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) { r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; } - if (p_post_barrier & BARRIER_MASK_COMPUTE) { + if (p_post_barrier.has_flag(BARRIER_MASK_COMPUTE)) { r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; r_stage_mask |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; } @@ -69,20 +69,20 @@ RenderingDeviceVulkan::Buffer *RenderingDeviceVulkan::_get_buffer_from_owner(RID r_access_mask |= VK_ACCESS_INDEX_READ_BIT; buffer = index_buffer_owner.get_or_null(p_buffer); } else if (uniform_buffer_owner.owns(p_buffer)) { - if (p_post_barrier & BARRIER_MASK_RASTER) { + if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) { r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; } - if (p_post_barrier & BARRIER_MASK_COMPUTE) { + if (p_post_barrier.has_flag(BARRIER_MASK_COMPUTE)) { r_stage_mask |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; } r_access_mask |= VK_ACCESS_UNIFORM_READ_BIT; buffer = uniform_buffer_owner.get_or_null(p_buffer); } else if (texture_buffer_owner.owns(p_buffer)) { - if (p_post_barrier & BARRIER_MASK_RASTER) { + if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) { r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; r_access_mask |= VK_ACCESS_SHADER_READ_BIT; } - if (p_post_barrier & BARRIER_MASK_COMPUTE) { + if (p_post_barrier.has_flag(BARRIER_MASK_COMPUTE)) { r_stage_mask |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; r_access_mask |= VK_ACCESS_SHADER_READ_BIT; } @@ -90,11 +90,11 @@ RenderingDeviceVulkan::Buffer *RenderingDeviceVulkan::_get_buffer_from_owner(RID buffer = &texture_buffer_owner.get_or_null(p_buffer)->buffer; } else if (storage_buffer_owner.owns(p_buffer)) { buffer = storage_buffer_owner.get_or_null(p_buffer); - if (p_post_barrier & BARRIER_MASK_RASTER) { + if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) { r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } - if (p_post_barrier & BARRIER_MASK_COMPUTE) { + if (p_post_barrier.has_flag(BARRIER_MASK_COMPUTE)) { r_stage_mask |= 
VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } @@ -1655,9 +1655,7 @@ RID RenderingDeviceVulkan::texture_create(const TextureFormat &p_format, const T image_create_info.pNext = nullptr; image_create_info.flags = 0; -#ifndef _MSC_VER -#warning TODO check for support via RenderingDevice to enable on mobile when possible -#endif + // TODO: Check for support via RenderingDevice to enable on mobile when possible. #ifndef ANDROID_ENABLED @@ -2011,7 +2009,7 @@ RID RenderingDeviceVulkan::texture_create(const TextureFormat &p_format, const T if (p_data.size()) { for (uint32_t i = 0; i < image_create_info.arrayLayers; i++) { - _texture_update(id, i, p_data[i], RD::BARRIER_MASK_ALL, true); + _texture_update(id, i, p_data[i], RD::BARRIER_MASK_ALL_BARRIERS, true); } } return id; @@ -2162,14 +2160,35 @@ RID RenderingDeviceVulkan::texture_create_from_extension(TextureType p_type, Dat texture.height = p_height; texture.depth = p_depth; texture.layers = p_layers; - texture.mipmaps = 0; // Maybe make this settable too? + texture.mipmaps = 1; texture.usage_flags = p_flags; texture.base_mipmap = 0; texture.base_layer = 0; texture.allowed_shared_formats.push_back(RD::DATA_FORMAT_R8G8B8A8_UNORM); texture.allowed_shared_formats.push_back(RD::DATA_FORMAT_R8G8B8A8_SRGB); - // Do we need to do something with texture.layout? + // Set base layout based on usage priority. + + if (texture.usage_flags & TEXTURE_USAGE_SAMPLING_BIT) { + // First priority, readable. + texture.layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL; + + } else if (texture.usage_flags & TEXTURE_USAGE_STORAGE_BIT) { + // Second priority, storage. + + texture.layout = VK_IMAGE_LAYOUT_GENERAL; + + } else if (texture.usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) { + // Third priority, color or depth. 
+ + texture.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; + + } else if (texture.usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) { + texture.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL; + + } else { + texture.layout = VK_IMAGE_LAYOUT_GENERAL; + } if (texture.usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) { texture.read_aspect_mask = VK_IMAGE_ASPECT_DEPTH_BIT; @@ -2395,7 +2414,7 @@ RID RenderingDeviceVulkan::texture_create_shared_from_slice(const TextureView &p return id; } -Error RenderingDeviceVulkan::texture_update(RID p_texture, uint32_t p_layer, const Vector<uint8_t> &p_data, uint32_t p_post_barrier) { +Error RenderingDeviceVulkan::texture_update(RID p_texture, uint32_t p_layer, const Vector<uint8_t> &p_data, BitField<BarrierMask> p_post_barrier) { return _texture_update(p_texture, p_layer, p_data, p_post_barrier, false); } @@ -2415,7 +2434,7 @@ static _ALWAYS_INLINE_ void _copy_region(uint8_t const *__restrict p_src, uint8_ } } -Error RenderingDeviceVulkan::_texture_update(RID p_texture, uint32_t p_layer, const Vector<uint8_t> &p_data, uint32_t p_post_barrier, bool p_use_setup_queue) { +Error RenderingDeviceVulkan::_texture_update(RID p_texture, uint32_t p_layer, const Vector<uint8_t> &p_data, BitField<BarrierMask> p_post_barrier, bool p_use_setup_queue) { _THREAD_SAFE_METHOD_ ERR_FAIL_COND_V_MSG((draw_list || compute_list) && !p_use_setup_queue, ERR_INVALID_PARAMETER, @@ -2589,15 +2608,15 @@ Error RenderingDeviceVulkan::_texture_update(RID p_texture, uint32_t p_layer, co { uint32_t barrier_flags = 0; uint32_t access_flags = 0; - if (p_post_barrier & BARRIER_MASK_COMPUTE) { + if (p_post_barrier.has_flag(BARRIER_MASK_COMPUTE)) { barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } - if (p_post_barrier & BARRIER_MASK_RASTER) { + if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) { barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } - if (p_post_barrier & BARRIER_MASK_TRANSFER) { + if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) { barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT; access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT; } @@ -2850,7 +2869,7 @@ Size2i RenderingDeviceVulkan::texture_size(RID p_texture) { return Size2i(tex->width, tex->height); } -Error RenderingDeviceVulkan::texture_copy(RID p_from_texture, RID p_to_texture, const Vector3 &p_from, const Vector3 &p_to, const Vector3 &p_size, uint32_t p_src_mipmap, uint32_t p_dst_mipmap, uint32_t p_src_layer, uint32_t p_dst_layer, uint32_t p_post_barrier) { +Error RenderingDeviceVulkan::texture_copy(RID p_from_texture, RID p_to_texture, const Vector3 &p_from, const Vector3 &p_to, const Vector3 &p_size, uint32_t p_src_mipmap, uint32_t p_dst_mipmap, uint32_t p_src_layer, uint32_t p_dst_layer, BitField<BarrierMask> p_post_barrier) { _THREAD_SAFE_METHOD_ Texture *src_tex = texture_owner.get_or_null(p_from_texture); @@ -2975,15 +2994,15 @@ Error RenderingDeviceVulkan::texture_copy(RID p_from_texture, RID p_to_texture, uint32_t barrier_flags = 0; uint32_t access_flags = 0; - if (p_post_barrier & BARRIER_MASK_COMPUTE) { + if (p_post_barrier.has_flag(BARRIER_MASK_COMPUTE)) { barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } - if (p_post_barrier & BARRIER_MASK_RASTER) { + if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) { barrier_flags 
|= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } - if (p_post_barrier & BARRIER_MASK_TRANSFER) { + if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) { barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT; access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT; } @@ -3045,7 +3064,7 @@ Error RenderingDeviceVulkan::texture_copy(RID p_from_texture, RID p_to_texture, return OK; } -Error RenderingDeviceVulkan::texture_resolve_multisample(RID p_from_texture, RID p_to_texture, uint32_t p_post_barrier) { +Error RenderingDeviceVulkan::texture_resolve_multisample(RID p_from_texture, RID p_to_texture, BitField<BarrierMask> p_post_barrier) { _THREAD_SAFE_METHOD_ Texture *src_tex = texture_owner.get_or_null(p_from_texture); @@ -3153,15 +3172,15 @@ Error RenderingDeviceVulkan::texture_resolve_multisample(RID p_from_texture, RID uint32_t barrier_flags = 0; uint32_t access_flags = 0; - if (p_post_barrier & BARRIER_MASK_COMPUTE) { + if (p_post_barrier.has_flag(BARRIER_MASK_COMPUTE)) { barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } - if (p_post_barrier & BARRIER_MASK_RASTER) { + if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) { barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } - if (p_post_barrier & BARRIER_MASK_TRANSFER) { + if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) { barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT; access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT; } @@ -3216,7 +3235,7 @@ Error RenderingDeviceVulkan::texture_resolve_multisample(RID p_from_texture, RID return OK; } -Error RenderingDeviceVulkan::texture_clear(RID p_texture, const Color &p_color, uint32_t p_base_mipmap, uint32_t p_mipmaps, uint32_t p_base_layer, uint32_t p_layers, uint32_t p_post_barrier) { +Error RenderingDeviceVulkan::texture_clear(RID p_texture, const Color &p_color, uint32_t p_base_mipmap, uint32_t p_mipmaps, uint32_t p_base_layer, uint32_t p_layers, BitField<BarrierMask> p_post_barrier) { _THREAD_SAFE_METHOD_ Texture *src_tex = texture_owner.get_or_null(p_texture); @@ -3289,15 +3308,15 @@ Error RenderingDeviceVulkan::texture_clear(RID p_texture, const Color &p_color, uint32_t barrier_flags = 0; uint32_t access_flags = 0; - if (p_post_barrier & BARRIER_MASK_COMPUTE) { + if (p_post_barrier.has_flag(BARRIER_MASK_COMPUTE)) { barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } - if (p_post_barrier & BARRIER_MASK_RASTER) { + if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) { barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } - if (p_post_barrier & BARRIER_MASK_TRANSFER) { + if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) { barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT; access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT; } @@ -3336,7 +3355,7 @@ Error RenderingDeviceVulkan::texture_clear(RID p_texture, const Color &p_color, return OK; } -bool RenderingDeviceVulkan::texture_is_format_supported_for_usage(DataFormat p_format, uint32_t p_usage) const { +bool RenderingDeviceVulkan::texture_is_format_supported_for_usage(DataFormat p_format, BitField<RenderingDevice::TextureUsageBits> p_usage) const { ERR_FAIL_INDEX_V(p_format, DATA_FORMAT_MAX, 
false); _THREAD_SAFE_METHOD_ @@ -3346,34 +3365,34 @@ bool RenderingDeviceVulkan::texture_is_format_supported_for_usage(DataFormat p_f vkGetPhysicalDeviceFormatProperties(context->get_physical_device(), vulkan_formats[p_format], &properties); VkFormatFeatureFlags flags; - if (p_usage & TEXTURE_USAGE_CPU_READ_BIT) { + if (p_usage.has_flag(TEXTURE_USAGE_CPU_READ_BIT)) { flags = properties.linearTilingFeatures; } else { flags = properties.optimalTilingFeatures; } - if (p_usage & TEXTURE_USAGE_SAMPLING_BIT && !(flags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) { + if (p_usage.has_flag(TEXTURE_USAGE_SAMPLING_BIT) && !(flags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) { return false; } - if (p_usage & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT && !(flags & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) { + if (p_usage.has_flag(TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) && !(flags & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) { return false; } - if (p_usage & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT && !(flags & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)) { + if (p_usage.has_flag(TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) && !(flags & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)) { return false; } - if (p_usage & TEXTURE_USAGE_STORAGE_BIT && !(flags & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)) { + if (p_usage.has_flag(TEXTURE_USAGE_STORAGE_BIT) && !(flags & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)) { return false; } - if (p_usage & TEXTURE_USAGE_STORAGE_ATOMIC_BIT && !(flags & VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT)) { + if (p_usage.has_flag(TEXTURE_USAGE_STORAGE_ATOMIC_BIT) && !(flags & VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT)) { return false; } // Validation via VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR fails if VRS attachment is not supported. - if (p_usage & TEXTURE_USAGE_VRS_ATTACHMENT_BIT && p_format != DATA_FORMAT_R8_UINT) { + if (p_usage.has_flag(TEXTURE_USAGE_VRS_ATTACHMENT_BIT) && p_format != DATA_FORMAT_R8_UINT) { return false; } @@ -3403,6 +3422,16 @@ VkRenderPass RenderingDeviceVulkan::_render_pass_create(const Vector<AttachmentF LocalVector<int32_t> attachment_last_pass; attachment_last_pass.resize(p_attachments.size()); + if (p_view_count > 1) { + const VulkanContext::MultiviewCapabilities capabilities = context->get_multiview_capabilities(); + + // This only works with multiview! + ERR_FAIL_COND_V_MSG(!capabilities.is_supported, VK_NULL_HANDLE, "Multiview not supported"); + + // Make sure we limit this to the number of views we support. + ERR_FAIL_COND_V_MSG(p_view_count > capabilities.max_view_count, VK_NULL_HANDLE, "Hardware does not support requested number of views for Multiview render pass"); + } + // These are only used if we use multiview but we need to define them in scope. const uint32_t view_mask = (1 << p_view_count) - 1; const uint32_t correlation_mask = (1 << p_view_count) - 1; @@ -3703,7 +3732,7 @@ VkRenderPass RenderingDeviceVulkan::_render_pass_create(const Vector<AttachmentF reference.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; attachment_last_pass[attachment] = i; } - reference.aspectMask = 0; + reference.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; color_references.push_back(reference); } @@ -3725,7 +3754,7 @@ VkRenderPass RenderingDeviceVulkan::_render_pass_create(const Vector<AttachmentF reference.layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL; attachment_last_pass[attachment] = i; } - reference.aspectMask = 0; // TODO: We need to set this here, possibly VK_IMAGE_ASPECT_COLOR_BIT? 
+ reference.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; input_references.push_back(reference); } @@ -3754,7 +3783,7 @@ VkRenderPass RenderingDeviceVulkan::_render_pass_create(const Vector<AttachmentF reference.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; // VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL attachment_last_pass[attachment] = i; } - reference.aspectMask = 0; + reference.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; resolve_references.push_back(reference); } @@ -3769,7 +3798,7 @@ VkRenderPass RenderingDeviceVulkan::_render_pass_create(const Vector<AttachmentF ERR_FAIL_COND_V_MSG(attachment_last_pass[attachment] == i, VK_NULL_HANDLE, "Invalid framebuffer depth format attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), it already was used for something else before in this pass."); depth_stencil_reference.attachment = attachment_remap[attachment]; depth_stencil_reference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL; - depth_stencil_reference.aspectMask = 0; + depth_stencil_reference.aspectMask = VK_IMAGE_ASPECT_NONE; attachment_last_pass[attachment] = i; if (is_multisample_first) { @@ -3795,9 +3824,9 @@ VkRenderPass RenderingDeviceVulkan::_render_pass_create(const Vector<AttachmentF vrs_reference.pNext = nullptr; vrs_reference.attachment = attachment_remap[attachment]; vrs_reference.layout = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR; - vrs_reference.aspectMask = 0; + vrs_reference.aspectMask = VK_IMAGE_ASPECT_NONE; - Size2i texel_size = context->get_vrs_capabilities().max_texel_size; + Size2i texel_size = context->get_vrs_capabilities().texel_size; VkFragmentShadingRateAttachmentInfoKHR &vrs_attachment_info = vrs_attachment_info_array[i]; vrs_attachment_info.sType = VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR; @@ -3936,16 +3965,9 @@ VkRenderPass RenderingDeviceVulkan::_render_pass_create(const Vector<AttachmentF Vector<uint32_t> view_masks; VkRenderPassMultiviewCreateInfo render_pass_multiview_create_info; - if (p_view_count > 1) { - // This may no longer be needed with the new settings already including this. - - const VulkanContext::MultiviewCapabilities capabilities = context->get_multiview_capabilities(); - - // For now this only works with multiview! - ERR_FAIL_COND_V_MSG(!capabilities.is_supported, VK_NULL_HANDLE, "Multiview not supported"); - - // Make sure we limit this to the number of views we support. - ERR_FAIL_COND_V_MSG(p_view_count > capabilities.max_view_count, VK_NULL_HANDLE, "Hardware does not support requested number of views for Multiview render pass"); + if ((p_view_count > 1) && !context->supports_renderpass2()) { + // This is only required when using vkCreateRenderPass, we add it if vkCreateRenderPass2KHR is not supported + // resulting this in being passed to our vkCreateRenderPass fallback. // Set view masks for each subpass. 
for (uint32_t i = 0; i < subpasses.size(); i++) { @@ -4356,7 +4378,7 @@ RenderingDevice::VertexFormatID RenderingDeviceVulkan::vertex_format_create(cons return id; } -RID RenderingDeviceVulkan::vertex_array_create(uint32_t p_vertex_count, VertexFormatID p_vertex_format, const Vector<RID> &p_src_buffers) { +RID RenderingDeviceVulkan::vertex_array_create(uint32_t p_vertex_count, VertexFormatID p_vertex_format, const Vector<RID> &p_src_buffers, const Vector<uint64_t> &p_offsets) { _THREAD_SAFE_METHOD_ ERR_FAIL_COND_V(!vertex_formats.has(p_vertex_format), RID()); @@ -4370,6 +4392,13 @@ RID RenderingDeviceVulkan::vertex_array_create(uint32_t p_vertex_count, VertexFo VertexArray vertex_array; + if (p_offsets.is_empty()) { + vertex_array.offsets.resize_zeroed(p_src_buffers.size()); + } else { + ERR_FAIL_COND_V(p_offsets.size() != p_src_buffers.size(), RID()); + vertex_array.offsets = p_offsets; + } + vertex_array.vertex_count = p_vertex_count; vertex_array.description = p_vertex_format; vertex_array.max_instances_allowed = 0xFFFFFFFF; // By default as many as you want. @@ -4401,7 +4430,6 @@ RID RenderingDeviceVulkan::vertex_array_create(uint32_t p_vertex_count, VertexFo } vertex_array.buffers.push_back(buffer->buffer); - vertex_array.offsets.push_back(0); // Offset unused, but passing anyway. } RID id = vertex_array_owner.make_rid(vertex_array); @@ -5172,9 +5200,9 @@ Vector<uint8_t> RenderingDeviceVulkan::shader_compile_binary_from_spirv(const Ve uint32_t offset = 0; uint8_t *binptr = ret.ptrw(); binptr[0] = 'G'; - binptr[1] = 'V'; + binptr[1] = 'S'; binptr[2] = 'B'; - binptr[3] = 'D'; // Godot vulkan binary data. + binptr[3] = 'D'; // Godot Shader Binary Data. offset += 4; encode_uint32(SHADER_BINARY_VERSION, binptr + offset); offset += sizeof(uint32_t); @@ -5235,7 +5263,7 @@ RID RenderingDeviceVulkan::shader_create_from_bytecode(const Vector<uint8_t> &p_ uint32_t read_offset = 0; // Consistency check. ERR_FAIL_COND_V(binsize < sizeof(uint32_t) * 3 + sizeof(RenderingDeviceVulkanShaderBinaryData), RID()); - ERR_FAIL_COND_V(binptr[0] != 'G' || binptr[1] != 'V' || binptr[2] != 'B' || binptr[3] != 'D', RID()); + ERR_FAIL_COND_V(binptr[0] != 'G' || binptr[1] != 'S' || binptr[2] != 'B' || binptr[3] != 'D', RID()); uint32_t bin_version = decode_uint32(binptr + 4); ERR_FAIL_COND_V(bin_version != SHADER_BINARY_VERSION, RID()); @@ -6293,7 +6321,7 @@ void RenderingDeviceVulkan::uniform_set_set_invalidation_callback(RID p_uniform_ us->invalidated_callback_userdata = p_userdata; } -Error RenderingDeviceVulkan::buffer_update(RID p_buffer, uint32_t p_offset, uint32_t p_size, const void *p_data, uint32_t p_post_barrier) { +Error RenderingDeviceVulkan::buffer_update(RID p_buffer, uint32_t p_offset, uint32_t p_size, const void *p_data, BitField<BarrierMask> p_post_barrier) { _THREAD_SAFE_METHOD_ ERR_FAIL_COND_V_MSG(draw_list, ERR_INVALID_PARAMETER, @@ -6303,7 +6331,7 @@ Error RenderingDeviceVulkan::buffer_update(RID p_buffer, uint32_t p_offset, uint VkPipelineStageFlags dst_stage_mask = 0; VkAccessFlags dst_access = 0; - if (p_post_barrier & BARRIER_MASK_TRANSFER) { + if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) { // Protect subsequent updates. 
dst_stage_mask = VK_PIPELINE_STAGE_TRANSFER_BIT; dst_access = VK_ACCESS_TRANSFER_WRITE_BIT; @@ -6339,7 +6367,7 @@ Error RenderingDeviceVulkan::buffer_update(RID p_buffer, uint32_t p_offset, uint return err; } -Error RenderingDeviceVulkan::buffer_clear(RID p_buffer, uint32_t p_offset, uint32_t p_size, uint32_t p_post_barrier) { +Error RenderingDeviceVulkan::buffer_clear(RID p_buffer, uint32_t p_offset, uint32_t p_size, BitField<BarrierMask> p_post_barrier) { _THREAD_SAFE_METHOD_ ERR_FAIL_COND_V_MSG((p_size % 4) != 0, ERR_INVALID_PARAMETER, @@ -6351,7 +6379,7 @@ Error RenderingDeviceVulkan::buffer_clear(RID p_buffer, uint32_t p_offset, uint3 VkPipelineStageFlags dst_stage_mask = 0; VkAccessFlags dst_access = 0; - if (p_post_barrier & BARRIER_MASK_TRANSFER) { + if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) { // Protect subsequent updates. dst_stage_mask = VK_PIPELINE_STAGE_TRANSFER_BIT; dst_access = VK_ACCESS_TRANSFER_WRITE_BIT; @@ -6390,7 +6418,7 @@ Vector<uint8_t> RenderingDeviceVulkan::buffer_get_data(RID p_buffer) { VkPipelineShaderStageCreateFlags src_stage_mask = VK_PIPELINE_STAGE_TRANSFER_BIT; VkAccessFlags src_access_mask = VK_ACCESS_TRANSFER_WRITE_BIT; // Get the vulkan buffer and the potential stage/access possible. - Buffer *buffer = _get_buffer_from_owner(p_buffer, src_stage_mask, src_access_mask, BARRIER_MASK_ALL); + Buffer *buffer = _get_buffer_from_owner(p_buffer, src_stage_mask, src_access_mask, BARRIER_MASK_ALL_BARRIERS); if (!buffer) { ERR_FAIL_V_MSG(Vector<uint8_t>(), "Buffer is either invalid or this type of buffer can't be retrieved. Only Index and Vertex buffers allow retrieving."); } @@ -6561,7 +6589,7 @@ RID RenderingDeviceVulkan::render_pipeline_create(RID p_shader, FramebufferForma ERR_FAIL_INDEX_V(p_rasterization_state.cull_mode, 3, RID()); rasterization_state_create_info.cullMode = cull_mode[p_rasterization_state.cull_mode]; rasterization_state_create_info.frontFace = (p_rasterization_state.front_face == POLYGON_FRONT_FACE_CLOCKWISE ? 
VK_FRONT_FACE_CLOCKWISE : VK_FRONT_FACE_COUNTER_CLOCKWISE); - rasterization_state_create_info.depthBiasEnable = p_rasterization_state.depth_bias_enable; + rasterization_state_create_info.depthBiasEnable = p_rasterization_state.depth_bias_enabled; rasterization_state_create_info.depthBiasConstantFactor = p_rasterization_state.depth_bias_constant_factor; rasterization_state_create_info.depthBiasClamp = p_rasterization_state.depth_bias_clamp; rasterization_state_create_info.depthBiasSlopeFactor = p_rasterization_state.depth_bias_slope_factor; @@ -7262,12 +7290,12 @@ Error RenderingDeviceVulkan::_draw_list_render_pass_begin(Framebuffer *framebuff return OK; } -void RenderingDeviceVulkan::_draw_list_insert_clear_region(DrawList *draw_list, Framebuffer *framebuffer, Point2i viewport_offset, Point2i viewport_size, bool p_clear_color, const Vector<Color> &p_clear_colors, bool p_clear_depth, float p_depth, uint32_t p_stencil) { +void RenderingDeviceVulkan::_draw_list_insert_clear_region(DrawList *p_draw_list, Framebuffer *p_framebuffer, Point2i p_viewport_offset, Point2i p_viewport_size, bool p_clear_color, const Vector<Color> &p_clear_colors, bool p_clear_depth, float p_depth, uint32_t p_stencil) { Vector<VkClearAttachment> clear_attachments; int color_index = 0; int texture_index = 0; - for (int i = 0; i < framebuffer->texture_ids.size(); i++) { - Texture *texture = texture_owner.get_or_null(framebuffer->texture_ids[i]); + for (int i = 0; i < p_framebuffer->texture_ids.size(); i++) { + Texture *texture = texture_owner.get_or_null(p_framebuffer->texture_ids[i]); if (!texture) { texture_index++; @@ -7300,12 +7328,12 @@ void RenderingDeviceVulkan::_draw_list_insert_clear_region(DrawList *draw_list, VkClearRect cr; cr.baseArrayLayer = 0; cr.layerCount = 1; - cr.rect.offset.x = viewport_offset.x; - cr.rect.offset.y = viewport_offset.y; - cr.rect.extent.width = viewport_size.width; - cr.rect.extent.height = viewport_size.height; + cr.rect.offset.x = p_viewport_offset.x; + cr.rect.offset.y = p_viewport_offset.y; + cr.rect.extent.width = p_viewport_size.width; + cr.rect.extent.height = p_viewport_size.height; - vkCmdClearAttachments(draw_list->command_buffer, clear_attachments.size(), clear_attachments.ptr(), 1, &cr); + vkCmdClearAttachments(p_draw_list->command_buffer, clear_attachments.size(), clear_attachments.ptr(), 1, &cr); } RenderingDevice::DrawListID RenderingDeviceVulkan::draw_list_begin(RID p_framebuffer, InitialAction p_initial_color_action, FinalAction p_final_color_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, const Vector<Color> &p_clear_color_values, float p_clear_depth, uint32_t p_clear_stencil, const Rect2 &p_region, const Vector<RID> &p_storage_textures) { @@ -7729,7 +7757,7 @@ void RenderingDeviceVulkan::draw_list_bind_index_array(DrawListID p_list, RID p_ dl->validation.index_array_size = index_array->indices; dl->validation.index_array_offset = index_array->offset; - vkCmdBindIndexBuffer(dl->command_buffer, index_array->buffer, index_array->offset, index_array->index_type); + vkCmdBindIndexBuffer(dl->command_buffer, index_array->buffer, 0, index_array->index_type); } void RenderingDeviceVulkan::draw_list_set_line_width(DrawListID p_list, float p_width) { @@ -8062,7 +8090,7 @@ void RenderingDeviceVulkan::_draw_list_free(Rect2i *r_last_viewport) { _THREAD_SAFE_UNLOCK_ } -void RenderingDeviceVulkan::draw_list_end(uint32_t p_post_barrier) { +void RenderingDeviceVulkan::draw_list_end(BitField<BarrierMask> p_post_barrier) { _THREAD_SAFE_METHOD_ 
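The recurring change in this file swaps raw uint32_t barrier masks for BitField<BarrierMask>, so call sites test flags through has_flag() rather than bitwise AND. A minimal sketch of the wrapper's shape, simplified from Godot's core/templates/bit_field.h (the names match the diff; the implementation here is illustrative):

#include <cstdint>

template <typename T>
class BitField {
	int64_t value = 0;

public:
	BitField() {}
	BitField(T p_value) { value = (int64_t)p_value; }
	bool has_flag(T p_flag) const { return (value & (int64_t)p_flag) != 0; }
	void set_flag(T p_flag) { value |= (int64_t)p_flag; }
	bool operator==(int64_t p_b) const { return value == p_b; } // Allows the `p_from == 0` checks in barrier().
};

// Call sites become:
//   if (p_post_barrier.has_flag(BARRIER_MASK_COMPUTE)) { ... }
// instead of:
//   if (p_post_barrier & BARRIER_MASK_COMPUTE) { ... }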
ERR_FAIL_COND_MSG(!draw_list, "Immediate draw list is already inactive."); @@ -8084,15 +8112,15 @@ void RenderingDeviceVulkan::draw_list_end(uint32_t p_post_barrier) { uint32_t barrier_flags = 0; uint32_t access_flags = 0; - if (p_post_barrier & BARRIER_MASK_COMPUTE) { + if (p_post_barrier.has_flag(BARRIER_MASK_COMPUTE)) { barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } - if (p_post_barrier & BARRIER_MASK_RASTER) { + if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) { barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT /*| VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT*/; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT /*| VK_ACCESS_INDIRECT_COMMAND_READ_BIT*/; } - if (p_post_barrier & BARRIER_MASK_TRANSFER) { + if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) { barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT; access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT; } @@ -8570,20 +8598,20 @@ void RenderingDeviceVulkan::compute_list_add_barrier(ComputeListID p_list) { #endif } -void RenderingDeviceVulkan::compute_list_end(uint32_t p_post_barrier) { +void RenderingDeviceVulkan::compute_list_end(BitField<BarrierMask> p_post_barrier) { ERR_FAIL_COND(!compute_list); uint32_t barrier_flags = 0; uint32_t access_flags = 0; - if (p_post_barrier & BARRIER_MASK_COMPUTE) { + if (p_post_barrier.has_flag(BARRIER_MASK_COMPUTE)) { barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; } - if (p_post_barrier & BARRIER_MASK_RASTER) { + if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) { barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT; access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT; } - if (p_post_barrier & BARRIER_MASK_TRANSFER) { + if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) { barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT; access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT; } @@ -8651,43 +8679,45 @@ void RenderingDeviceVulkan::compute_list_end(uint32_t p_post_barrier) { _THREAD_SAFE_UNLOCK_ } -void RenderingDeviceVulkan::barrier(uint32_t p_from, uint32_t p_to) { +void RenderingDeviceVulkan::barrier(BitField<BarrierMask> p_from, BitField<BarrierMask> p_to) { uint32_t src_barrier_flags = 0; uint32_t src_access_flags = 0; - if (p_from & BARRIER_MASK_COMPUTE) { - src_barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; - src_access_flags |= VK_ACCESS_SHADER_WRITE_BIT; - } - if (p_from & BARRIER_MASK_RASTER) { - src_barrier_flags |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT; - src_access_flags |= VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; - } - if (p_from & BARRIER_MASK_TRANSFER) { - src_barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT; - src_access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT; - } if (p_from == 0) { src_barrier_flags = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; + } else { + if (p_from.has_flag(BARRIER_MASK_COMPUTE)) { + src_barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; + src_access_flags |= 
VK_ACCESS_SHADER_WRITE_BIT; + } + if (p_from.has_flag(BARRIER_MASK_RASTER)) { + src_barrier_flags |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT; + src_access_flags |= VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; + } + if (p_from.has_flag(BARRIER_MASK_TRANSFER)) { + src_barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT; + src_access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT; + } } uint32_t dst_barrier_flags = 0; uint32_t dst_access_flags = 0; - if (p_to & BARRIER_MASK_COMPUTE) { - dst_barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; - dst_access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; - } - if (p_to & BARRIER_MASK_RASTER) { - dst_barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT; - dst_access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT; - } - if (p_to & BARRIER_MASK_TRANSFER) { - dst_barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT; - dst_access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT; - } if (p_to == 0) { dst_barrier_flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; + } else { + if (p_to.has_flag(BARRIER_MASK_COMPUTE)) { + dst_barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; + dst_access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; + } + if (p_to.has_flag(BARRIER_MASK_RASTER)) { + dst_barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT; + dst_access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT; + } + if (p_to.has_flag(BARRIER_MASK_TRANSFER)) { + dst_barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT; + dst_access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT; + } } _memory_barrier(src_barrier_flags, dst_barrier_flags, src_access_flags, dst_access_flags, true); @@ -9164,7 +9194,7 @@ void RenderingDeviceVulkan::_free_pending_resources(int p_frame) { Texture *texture = &frames[p_frame].textures_to_dispose_of.front()->get(); if (texture->bound) { - WARN_PRINT("Deleted a texture while it was bound.."); + WARN_PRINT("Deleted a texture while it was bound."); } vkDestroyImageView(device, texture->view, nullptr); if (texture->owner.is_null()) { @@ -9365,12 +9395,10 @@ void RenderingDeviceVulkan::initialize(VulkanContext *p_context, bool p_local_de } } - // NOTE: If adding new project settings here, also duplicate their definition in - // rendering_server.cpp for headless doctool. - staging_buffer_block_size = GLOBAL_DEF("rendering/rendering_device/staging_buffer/block_size_kb", 256); + staging_buffer_block_size = GLOBAL_GET("rendering/rendering_device/staging_buffer/block_size_kb"); staging_buffer_block_size = MAX(4u, staging_buffer_block_size); staging_buffer_block_size *= 1024; // Kb -> bytes. 
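For context on the GLOBAL_DEF to GLOBAL_GET swap above: the defaults stay registered once at the RenderingServer level (hence the removed NOTE about duplicating definitions), and the driver now only reads the values. A sketch of the resulting split, assuming the registration site is rendering_server.cpp:

// Registered once, elsewhere (assumed location: rendering_server.cpp):
//   GLOBAL_DEF("rendering/rendering_device/staging_buffer/block_size_kb", 256);
//   GLOBAL_DEF("rendering/rendering_device/staging_buffer/max_size_mb", 128);

// Read-only consumption in the driver, clamped and converted to bytes:
uint32_t block_size_kb = GLOBAL_GET("rendering/rendering_device/staging_buffer/block_size_kb");
uint32_t staging_buffer_block_size = MAX(4u, block_size_kb) * 1024; // KB -> bytes, at least 4 KB.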
- staging_buffer_max_size = GLOBAL_DEF("rendering/rendering_device/staging_buffer/max_size_mb", 128); + staging_buffer_max_size = GLOBAL_GET("rendering/rendering_device/staging_buffer/max_size_mb"); staging_buffer_max_size = MAX(1u, staging_buffer_max_size); staging_buffer_max_size *= 1024 * 1024; @@ -9378,7 +9406,7 @@ void RenderingDeviceVulkan::initialize(VulkanContext *p_context, bool p_local_de // Validate enough blocks. staging_buffer_max_size = staging_buffer_block_size * 4; } - texture_upload_region_size_px = GLOBAL_DEF("rendering/rendering_device/staging_buffer/texture_upload_region_size_px", 64); + texture_upload_region_size_px = GLOBAL_GET("rendering/rendering_device/staging_buffer/texture_upload_region_size_px"); texture_upload_region_size_px = nearest_power_of_2_templated(texture_upload_region_size_px); frames_drawn = frame_count; // Start from frame count, so everything else is immediately old. @@ -9393,7 +9421,7 @@ void RenderingDeviceVulkan::initialize(VulkanContext *p_context, bool p_local_de ERR_CONTINUE(err != OK); } - max_descriptors_per_pool = GLOBAL_DEF("rendering/rendering_device/descriptor_pools/max_descriptors_per_pool", 64); + max_descriptors_per_pool = GLOBAL_GET("rendering/rendering_device/vulkan/max_descriptors_per_pool"); // Check to make sure DescriptorPoolKey is good. static_assert(sizeof(uint64_t) * 3 >= UNIFORM_TYPE_MAX * sizeof(uint16_t)); @@ -9688,6 +9716,10 @@ uint64_t RenderingDeviceVulkan::limit_get(Limit p_limit) const { return limits.maxComputeWorkGroupSize[1]; case LIMIT_MAX_COMPUTE_WORKGROUP_SIZE_Z: return limits.maxComputeWorkGroupSize[2]; + case LIMIT_MAX_VIEWPORT_DIMENSIONS_X: + return limits.maxViewportDimensions[0]; + case LIMIT_MAX_VIEWPORT_DIMENSIONS_Y: + return limits.maxViewportDimensions[1]; case LIMIT_SUBGROUP_SIZE: { VulkanContext::SubgroupCapabilities subgroup_capabilities = context->get_subgroup_capabilities(); return subgroup_capabilities.size; @@ -9700,6 +9732,12 @@ uint64_t RenderingDeviceVulkan::limit_get(Limit p_limit) const { VulkanContext::SubgroupCapabilities subgroup_capabilities = context->get_subgroup_capabilities(); return subgroup_capabilities.supported_operations_flags_rd(); } + case LIMIT_VRS_TEXEL_WIDTH: { + return context->get_vrs_capabilities().texel_size.x; + } + case LIMIT_VRS_TEXEL_HEIGHT: { + return context->get_vrs_capabilities().texel_size.y; + } default: ERR_FAIL_V(0); } diff --git a/drivers/vulkan/rendering_device_vulkan.h b/drivers/vulkan/rendering_device_vulkan.h index abec1b0e1b..c6e1830e90 100644 --- a/drivers/vulkan/rendering_device_vulkan.h +++ b/drivers/vulkan/rendering_device_vulkan.h @@ -162,7 +162,7 @@ class RenderingDeviceVulkan : public RenderingDevice { uint32_t texture_upload_region_size_px = 0; Vector<uint8_t> _texture_get_data_from_image(Texture *tex, VkImage p_image, VmaAllocation p_allocation, uint32_t p_layer, bool p_2d = false); - Error _texture_update(RID p_texture, uint32_t p_layer, const Vector<uint8_t> &p_data, uint32_t p_post_barrier, bool p_use_setup_queue); + Error _texture_update(RID p_texture, uint32_t p_layer, const Vector<uint8_t> &p_data, BitField<BarrierMask> p_post_barrier, bool p_use_setup_queue); /*****************/ /**** SAMPLER ****/ @@ -905,11 +905,11 @@ class RenderingDeviceVulkan : public RenderingDevice { bool draw_list_unbind_color_textures = false; bool draw_list_unbind_depth_textures = false; - void _draw_list_insert_clear_region(DrawList *draw_list, Framebuffer *framebuffer, Point2i viewport_offset, Point2i viewport_size, bool p_clear_color, const Vector<Color> 
&p_clear_colors, bool p_clear_depth, float p_depth, uint32_t p_stencil); + void _draw_list_insert_clear_region(DrawList *p_draw_list, Framebuffer *p_framebuffer, Point2i p_viewport_offset, Point2i p_viewport_size, bool p_clear_color, const Vector<Color> &p_clear_colors, bool p_clear_depth, float p_depth, uint32_t p_stencil); Error _draw_list_setup_framebuffer(Framebuffer *p_framebuffer, InitialAction p_initial_color_action, FinalAction p_final_color_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, VkFramebuffer *r_framebuffer, VkRenderPass *r_render_pass, uint32_t *r_subpass_count); Error _draw_list_render_pass_begin(Framebuffer *framebuffer, InitialAction p_initial_color_action, FinalAction p_final_color_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, const Vector<Color> &p_clear_colors, float p_clear_depth, uint32_t p_clear_stencil, Point2i viewport_offset, Point2i viewport_size, VkFramebuffer vkframebuffer, VkRenderPass render_pass, VkCommandBuffer command_buffer, VkSubpassContents subpass_contents, const Vector<RID> &p_storage_textures); _FORCE_INLINE_ DrawList *_get_draw_list_ptr(DrawListID p_id); - Buffer *_get_buffer_from_owner(RID p_buffer, VkPipelineStageFlags &dst_stage_mask, VkAccessFlags &dst_access, uint32_t p_post_barrier); + Buffer *_get_buffer_from_owner(RID p_buffer, VkPipelineStageFlags &dst_stage_mask, VkAccessFlags &dst_access, BitField<BarrierMask> p_post_barrier); Error _draw_list_allocate(const Rect2i &p_viewport, uint32_t p_splits, uint32_t p_subpass); void _draw_list_free(Rect2i *r_last_viewport = nullptr); @@ -1052,17 +1052,17 @@ public: virtual RID texture_create_from_extension(TextureType p_type, DataFormat p_format, TextureSamples p_samples, uint64_t p_flags, uint64_t p_image, uint64_t p_width, uint64_t p_height, uint64_t p_depth, uint64_t p_layers); virtual RID texture_create_shared_from_slice(const TextureView &p_view, RID p_with_texture, uint32_t p_layer, uint32_t p_mipmap, uint32_t p_mipmaps = 1, TextureSliceType p_slice_type = TEXTURE_SLICE_2D); - virtual Error texture_update(RID p_texture, uint32_t p_layer, const Vector<uint8_t> &p_data, uint32_t p_post_barrier = BARRIER_MASK_ALL); + virtual Error texture_update(RID p_texture, uint32_t p_layer, const Vector<uint8_t> &p_data, BitField<BarrierMask> p_post_barrier = BARRIER_MASK_ALL_BARRIERS); virtual Vector<uint8_t> texture_get_data(RID p_texture, uint32_t p_layer); - virtual bool texture_is_format_supported_for_usage(DataFormat p_format, uint32_t p_usage) const; + virtual bool texture_is_format_supported_for_usage(DataFormat p_format, BitField<RenderingDevice::TextureUsageBits> p_usage) const; virtual bool texture_is_shared(RID p_texture); virtual bool texture_is_valid(RID p_texture); virtual Size2i texture_size(RID p_texture); - virtual Error texture_copy(RID p_from_texture, RID p_to_texture, const Vector3 &p_from, const Vector3 &p_to, const Vector3 &p_size, uint32_t p_src_mipmap, uint32_t p_dst_mipmap, uint32_t p_src_layer, uint32_t p_dst_layer, uint32_t p_post_barrier = BARRIER_MASK_ALL); - virtual Error texture_clear(RID p_texture, const Color &p_color, uint32_t p_base_mipmap, uint32_t p_mipmaps, uint32_t p_base_layer, uint32_t p_layers, uint32_t p_post_barrier = BARRIER_MASK_ALL); - virtual Error texture_resolve_multisample(RID p_from_texture, RID p_to_texture, uint32_t p_post_barrier = BARRIER_MASK_ALL); + virtual Error texture_copy(RID p_from_texture, RID p_to_texture, const Vector3 &p_from, const Vector3 &p_to, const Vector3 &p_size, 
uint32_t p_src_mipmap, uint32_t p_dst_mipmap, uint32_t p_src_layer, uint32_t p_dst_layer, BitField<BarrierMask> p_post_barrier = BARRIER_MASK_ALL_BARRIERS); + virtual Error texture_clear(RID p_texture, const Color &p_color, uint32_t p_base_mipmap, uint32_t p_mipmaps, uint32_t p_base_layer, uint32_t p_layers, BitField<BarrierMask> p_post_barrier = BARRIER_MASK_ALL_BARRIERS); + virtual Error texture_resolve_multisample(RID p_from_texture, RID p_to_texture, BitField<BarrierMask> p_post_barrier = BARRIER_MASK_ALL_BARRIERS); /*********************/ /**** FRAMEBUFFER ****/ @@ -1095,7 +1095,7 @@ public: // Internally reference counted, this ID is warranted to be unique for the same description, but needs to be freed as many times as it was allocated. virtual VertexFormatID vertex_format_create(const Vector<VertexAttribute> &p_vertex_formats); - virtual RID vertex_array_create(uint32_t p_vertex_count, VertexFormatID p_vertex_format, const Vector<RID> &p_src_buffers); + virtual RID vertex_array_create(uint32_t p_vertex_count, VertexFormatID p_vertex_format, const Vector<RID> &p_src_buffers, const Vector<uint64_t> &p_offsets = Vector<uint64_t>()); virtual RID index_buffer_create(uint32_t p_size_indices, IndexBufferFormat p_format, const Vector<uint8_t> &p_data = Vector<uint8_t>(), bool p_use_restart_indices = false); @@ -1124,8 +1124,8 @@ public: virtual bool uniform_set_is_valid(RID p_uniform_set); virtual void uniform_set_set_invalidation_callback(RID p_uniform_set, InvalidationCallback p_callback, void *p_userdata); - virtual Error buffer_update(RID p_buffer, uint32_t p_offset, uint32_t p_size, const void *p_data, uint32_t p_post_barrier = BARRIER_MASK_ALL); // Works for any buffer. - virtual Error buffer_clear(RID p_buffer, uint32_t p_offset, uint32_t p_size, uint32_t p_post_barrier = BARRIER_MASK_ALL); + virtual Error buffer_update(RID p_buffer, uint32_t p_offset, uint32_t p_size, const void *p_data, BitField<BarrierMask> p_post_barrier = BARRIER_MASK_ALL_BARRIERS); // Works for any buffer. 
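Usage sketch for the p_offsets parameter added to vertex_array_create() above; the rd handle and the buffer RIDs are illustrative:

Vector<RID> buffers;
buffers.push_back(position_buffer); // Hypothetical vertex buffers.
buffers.push_back(uv_buffer);

Vector<uint64_t> offsets;
offsets.push_back(0); // Read positions from the start of the first buffer.
offsets.push_back(64); // Skip a 64-byte header in the second buffer.

RID array = rd->vertex_array_create(vertex_count, vertex_format, buffers, offsets);
// Omitting p_offsets keeps the old behavior: one zeroed offset per source buffer.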
+ virtual Error buffer_clear(RID p_buffer, uint32_t p_offset, uint32_t p_size, BitField<BarrierMask> p_post_barrier = BARRIER_MASK_ALL_BARRIERS); virtual Vector<uint8_t> buffer_get_data(RID p_buffer); /*************************/ @@ -1176,7 +1176,7 @@ public: virtual DrawListID draw_list_switch_to_next_pass(); virtual Error draw_list_switch_to_next_pass_split(uint32_t p_splits, DrawListID *r_split_ids); - virtual void draw_list_end(uint32_t p_post_barrier = BARRIER_MASK_ALL); + virtual void draw_list_end(BitField<BarrierMask> p_post_barrier = BARRIER_MASK_ALL_BARRIERS); /***********************/ /**** COMPUTE LISTS ****/ @@ -1191,9 +1191,9 @@ public: virtual void compute_list_dispatch(ComputeListID p_list, uint32_t p_x_groups, uint32_t p_y_groups, uint32_t p_z_groups); virtual void compute_list_dispatch_threads(ComputeListID p_list, uint32_t p_x_threads, uint32_t p_y_threads, uint32_t p_z_threads); virtual void compute_list_dispatch_indirect(ComputeListID p_list, RID p_buffer, uint32_t p_offset); - virtual void compute_list_end(uint32_t p_post_barrier = BARRIER_MASK_ALL); + virtual void compute_list_end(BitField<BarrierMask> p_post_barrier = BARRIER_MASK_ALL_BARRIERS); - virtual void barrier(uint32_t p_from = BARRIER_MASK_ALL, uint32_t p_to = BARRIER_MASK_ALL); + virtual void barrier(BitField<BarrierMask> p_from = BARRIER_MASK_ALL_BARRIERS, BitField<BarrierMask> p_to = BARRIER_MASK_ALL_BARRIERS); virtual void full_barrier(); /**************/ diff --git a/drivers/vulkan/vulkan_context.cpp b/drivers/vulkan/vulkan_context.cpp index e64b0c4a84..028c7dca6f 100644 --- a/drivers/vulkan/vulkan_context.cpp +++ b/drivers/vulkan/vulkan_context.cpp @@ -48,15 +48,119 @@ VulkanHooks *VulkanContext::vulkan_hooks = nullptr; -VkResult VulkanContext::vkCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) { - if (fpCreateRenderPass2KHR == nullptr) { - fpCreateRenderPass2KHR = (PFN_vkCreateRenderPass2KHR)vkGetInstanceProcAddr(inst, "vkCreateRenderPass2KHR"); +Vector<VkAttachmentReference> VulkanContext::_convert_VkAttachmentReference2(uint32_t p_count, const VkAttachmentReference2 *p_refs) { + Vector<VkAttachmentReference> att_refs; + + if (p_refs != nullptr) { + for (uint32_t i = 0; i < p_count; i++) { + // We lose aspectMask in this conversion but we don't use it currently. + + VkAttachmentReference ref = { + p_refs[i].attachment, /* attachment */ + p_refs[i].layout /* layout */ + }; + + att_refs.push_back(ref); + } } - if (fpCreateRenderPass2KHR == nullptr) { - return VK_ERROR_EXTENSION_NOT_PRESENT; + return att_refs; +} + +VkResult VulkanContext::vkCreateRenderPass2KHR(VkDevice p_device, const VkRenderPassCreateInfo2 *p_create_info, const VkAllocationCallbacks *p_allocator, VkRenderPass *p_render_pass) { + if (is_device_extension_enabled(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME)) { + if (fpCreateRenderPass2KHR == nullptr) { + fpCreateRenderPass2KHR = (PFN_vkCreateRenderPass2KHR)vkGetDeviceProcAddr(p_device, "vkCreateRenderPass2KHR"); + } + + if (fpCreateRenderPass2KHR == nullptr) { + return VK_ERROR_EXTENSION_NOT_PRESENT; + } else { + return (fpCreateRenderPass2KHR)(p_device, p_create_info, p_allocator, p_render_pass); + } } else { - return (fpCreateRenderPass2KHR)(device, pCreateInfo, pAllocator, pRenderPass); + // need to fall back on vkCreateRenderPass + + const void *next = p_create_info->pNext; // ATM we only support multiview which should work if supported. 
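The fallback above is entered when VK_KHR_create_renderpass2 was not enabled at device creation. A reduced sketch of the gating pattern this diff introduces (the HashSet bookkeeping is assumed from the surrounding changes):

struct VulkanContextSketch {
	HashSet<CharString> enabled_device_extension_names;

	bool is_device_extension_enabled(const CharString &p_extension) const {
		return enabled_device_extension_names.has(p_extension);
	}
};

// Optional functionality then degrades gracefully:
//   if (is_device_extension_enabled(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME)) {
//       // Resolve vkCreateRenderPass2KHR lazily via vkGetDeviceProcAddr and call it.
//   } else {
//       // Downgrade the *2 structs and fall back to plain vkCreateRenderPass.
//   }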
+ + Vector<VkAttachmentDescription> attachments; + for (uint32_t i = 0; i < p_create_info->attachmentCount; i++) { + // Basically the old layout just misses type and next. + VkAttachmentDescription att = { + p_create_info->pAttachments[i].flags, /* flags */ + p_create_info->pAttachments[i].format, /* format */ + p_create_info->pAttachments[i].samples, /* samples */ + p_create_info->pAttachments[i].loadOp, /* loadOp */ + p_create_info->pAttachments[i].storeOp, /* storeOp */ + p_create_info->pAttachments[i].stencilLoadOp, /* stencilLoadOp */ + p_create_info->pAttachments[i].stencilStoreOp, /* stencilStoreOp */ + p_create_info->pAttachments[i].initialLayout, /* initialLayout */ + p_create_info->pAttachments[i].finalLayout /* finalLayout */ + }; + + attachments.push_back(att); + } + + Vector<VkSubpassDescription> subpasses; + for (uint32_t i = 0; i < p_create_info->subpassCount; i++) { + // Here we need to do more, again it's just stripping out type and next + // but we have VkAttachmentReference2 to convert to VkAttachmentReference. + // Also viewmask is not supported but we don't use it outside of multiview. + + Vector<VkAttachmentReference> input_attachments = _convert_VkAttachmentReference2(p_create_info->pSubpasses[i].inputAttachmentCount, p_create_info->pSubpasses[i].pInputAttachments); + Vector<VkAttachmentReference> color_attachments = _convert_VkAttachmentReference2(p_create_info->pSubpasses[i].colorAttachmentCount, p_create_info->pSubpasses[i].pColorAttachments); + Vector<VkAttachmentReference> resolve_attachments = _convert_VkAttachmentReference2(p_create_info->pSubpasses[i].colorAttachmentCount, p_create_info->pSubpasses[i].pResolveAttachments); + Vector<VkAttachmentReference> depth_attachments = _convert_VkAttachmentReference2(p_create_info->pSubpasses[i].colorAttachmentCount, p_create_info->pSubpasses[i].pDepthStencilAttachment); + + VkSubpassDescription subpass = { + p_create_info->pSubpasses[i].flags, /* flags */ + p_create_info->pSubpasses[i].pipelineBindPoint, /* pipelineBindPoint */ + p_create_info->pSubpasses[i].inputAttachmentCount, /* inputAttachmentCount */ + input_attachments.size() == 0 ? nullptr : input_attachments.ptr(), /* pInputAttachments */ + p_create_info->pSubpasses[i].colorAttachmentCount, /* colorAttachmentCount */ + color_attachments.size() == 0 ? nullptr : color_attachments.ptr(), /* pColorAttachments */ + resolve_attachments.size() == 0 ? nullptr : resolve_attachments.ptr(), /* pResolveAttachments */ + depth_attachments.size() == 0 ? nullptr : depth_attachments.ptr(), /* pDepthStencilAttachment */ + p_create_info->pSubpasses[i].preserveAttachmentCount, /* preserveAttachmentCount */ + p_create_info->pSubpasses[i].pPreserveAttachments /* pPreserveAttachments */ + }; + + subpasses.push_back(subpass); + } + + Vector<VkSubpassDependency> dependencies; + for (uint32_t i = 0; i < p_create_info->dependencyCount; i++) { + // We lose viewOffset here but again I don't believe we use this anywhere. 
+ VkSubpassDependency dep = { + p_create_info->pDependencies[i].srcSubpass, /* srcSubpass */ + p_create_info->pDependencies[i].dstSubpass, /* dstSubpass */ + p_create_info->pDependencies[i].srcStageMask, /* srcStageMask */ + p_create_info->pDependencies[i].dstStageMask, /* dstStageMask */ + p_create_info->pDependencies[i].srcAccessMask, /* srcAccessMask */ + p_create_info->pDependencies[i].dstAccessMask, /* dstAccessMask */ + p_create_info->pDependencies[i].dependencyFlags, /* dependencyFlags */ + }; + + dependencies.push_back(dep); + } + + // CorrelatedViewMask is not supported in vkCreateRenderPass but we + // currently only use this for multiview. + // We'll need to look into this. + + VkRenderPassCreateInfo create_info = { + VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, /* sType */ + next, /* pNext*/ + p_create_info->flags, /* flags */ + (uint32_t)attachments.size(), /* attachmentCount */ + attachments.ptr(), /* pAttachments */ + (uint32_t)subpasses.size(), /* subpassCount */ + subpasses.ptr(), /* pSubpasses */ + (uint32_t)dependencies.size(), /* */ + dependencies.ptr(), /* */ + }; + + return vkCreateRenderPass(device, &create_info, p_allocator, p_render_pass); } } @@ -296,16 +400,28 @@ Error VulkanContext::_obtain_vulkan_version() { return OK; } -Error VulkanContext::_initialize_extensions() { - uint32_t instance_extension_count = 0; +bool VulkanContext::instance_extensions_initialized = false; +HashMap<CharString, bool> VulkanContext::requested_instance_extensions; + +void VulkanContext::register_requested_instance_extension(const CharString &extension_name, bool p_required) { + ERR_FAIL_COND_MSG(instance_extensions_initialized, "You can only registered extensions before the Vulkan instance is created"); + ERR_FAIL_COND(requested_instance_extensions.has(extension_name)); + + requested_instance_extensions[extension_name] = p_required; +} + +Error VulkanContext::_initialize_instance_extensions() { + enabled_instance_extension_names.clear(); + + // Make sure our core extensions are here + register_requested_instance_extension(VK_KHR_SURFACE_EXTENSION_NAME, true); + register_requested_instance_extension(_get_platform_surface_extension(), true); + + if (_use_validation_layers()) { + register_requested_instance_extension(VK_EXT_DEBUG_REPORT_EXTENSION_NAME, false); + } - enabled_extension_count = 0; - enabled_debug_utils = false; - enabled_debug_report = false; - // Look for instance extensions. - VkBool32 surfaceExtFound = 0; - VkBool32 platformSurfaceExtFound = 0; - memset(extension_names, 0, sizeof(extension_names)); + register_requested_instance_extension(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, false); // Only enable debug utils in verbose mode or DEV_ENABLED. // End users would get spammed with messages of varying verbosity due to the @@ -316,54 +432,141 @@ Error VulkanContext::_initialize_extensions() { #else bool want_debug_utils = OS::get_singleton()->is_stdout_verbose(); #endif + if (want_debug_utils) { + register_requested_instance_extension(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, false); + } + // Load instance extensions that are available... 
+ uint32_t instance_extension_count = 0; VkResult err = vkEnumerateInstanceExtensionProperties(nullptr, &instance_extension_count, nullptr); ERR_FAIL_COND_V(err != VK_SUCCESS && err != VK_INCOMPLETE, ERR_CANT_CREATE); + ERR_FAIL_COND_V_MSG(instance_extension_count == 0, ERR_CANT_CREATE, "No instance extensions found, is a driver installed?"); - if (instance_extension_count > 0) { - VkExtensionProperties *instance_extensions = (VkExtensionProperties *)malloc(sizeof(VkExtensionProperties) * instance_extension_count); - err = vkEnumerateInstanceExtensionProperties(nullptr, &instance_extension_count, instance_extensions); - if (err != VK_SUCCESS && err != VK_INCOMPLETE) { - free(instance_extensions); - ERR_FAIL_V(ERR_CANT_CREATE); + VkExtensionProperties *instance_extensions = (VkExtensionProperties *)malloc(sizeof(VkExtensionProperties) * instance_extension_count); + err = vkEnumerateInstanceExtensionProperties(nullptr, &instance_extension_count, instance_extensions); + if (err != VK_SUCCESS && err != VK_INCOMPLETE) { + free(instance_extensions); + ERR_FAIL_V(ERR_CANT_CREATE); + } +#ifdef DEV_ENABLED + for (uint32_t i = 0; i < instance_extension_count; i++) { + print_verbose(String("VULKAN: Found instance extension ") + String(instance_extensions[i].extensionName)); + } +#endif + + // Enable all extensions that are supported and requested + for (uint32_t i = 0; i < instance_extension_count; i++) { + CharString extension_name(instance_extensions[i].extensionName); + if (requested_instance_extensions.has(extension_name)) { + enabled_instance_extension_names.insert(extension_name); } - for (uint32_t i = 0; i < instance_extension_count; i++) { - if (!strcmp(VK_KHR_SURFACE_EXTENSION_NAME, instance_extensions[i].extensionName)) { - surfaceExtFound = 1; - extension_names[enabled_extension_count++] = VK_KHR_SURFACE_EXTENSION_NAME; - } + } - if (!strcmp(_get_platform_surface_extension(), instance_extensions[i].extensionName)) { - platformSurfaceExtFound = 1; - extension_names[enabled_extension_count++] = _get_platform_surface_extension(); - } - if (!strcmp(VK_EXT_DEBUG_REPORT_EXTENSION_NAME, instance_extensions[i].extensionName)) { - if (_use_validation_layers()) { - extension_names[enabled_extension_count++] = VK_EXT_DEBUG_REPORT_EXTENSION_NAME; - enabled_debug_report = true; - } - } - if (!strcmp(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, instance_extensions[i].extensionName)) { - if (want_debug_utils) { - extension_names[enabled_extension_count++] = VK_EXT_DEBUG_UTILS_EXTENSION_NAME; - enabled_debug_utils = true; - } - } - if (!strcmp(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, instance_extensions[i].extensionName)) { - extension_names[enabled_extension_count++] = VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; - } - if (enabled_extension_count >= MAX_EXTENSIONS) { + // Now check our requested extensions + for (KeyValue<CharString, bool> &requested_extension : requested_instance_extensions) { + if (!enabled_instance_extension_names.has(requested_extension.key)) { + if (requested_extension.value) { free(instance_extensions); - ERR_FAIL_V_MSG(ERR_BUG, "Enabled extension count reaches MAX_EXTENSIONS, BUG"); + ERR_FAIL_V_MSG(ERR_BUG, String("Required extension ") + String(requested_extension.key) + String(" not found, is a driver installed?")); + } else { + print_verbose(String("Optional extension ") + String(requested_extension.key) + String(" not found")); } } + } - free(instance_extensions); + free(instance_extensions); + + instance_extensions_initialized = true; + return OK; +} + +bool 
VulkanContext::device_extensions_initialized = false;
+HashMap<CharString, bool> VulkanContext::requested_device_extensions;
+
+void VulkanContext::register_requested_device_extension(const CharString &extension_name, bool p_required) {
+ ERR_FAIL_COND_MSG(device_extensions_initialized, "You can only register extensions before the Vulkan device is created");
+ ERR_FAIL_COND(requested_device_extensions.has(extension_name));
+
+ requested_device_extensions[extension_name] = p_required;
+}
+
+Error VulkanContext::_initialize_device_extensions() {
+ // Look for device extensions.
+ enabled_device_extension_names.clear();
+
+ // Make sure our core extensions are here.
+ register_requested_device_extension(VK_KHR_SWAPCHAIN_EXTENSION_NAME, true);
+
+ register_requested_device_extension(VK_KHR_MULTIVIEW_EXTENSION_NAME, false);
+ register_requested_device_extension(VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME, false);
+ register_requested_device_extension(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME, false);
+ register_requested_device_extension(VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME, false);
+ register_requested_device_extension(VK_KHR_16BIT_STORAGE_EXTENSION_NAME, false);
+
+ // TODO consider the following extensions:
+ // - VK_KHR_spirv_1_4
+ // - VK_KHR_swapchain_mutable_format
+ // - VK_EXT_full_screen_exclusive
+ // - VK_EXT_hdr_metadata
+ // - VK_KHR_depth_stencil_resolve
+
+ // Even though the user "enabled" the extension via the command
+ // line, we must make sure that it's enumerated for use with the
+ // device. Therefore, request it as an optional extension here,
+ // and check is_device_extension_enabled() before use.
+ if (VK_KHR_incremental_present_enabled) {
+ register_requested_device_extension(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME, false);
+ }
+ if (VK_GOOGLE_display_timing_enabled) {
+ register_requested_device_extension(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME, false);
+ }
+
+ // Obtain the list of available device extensions.
+ uint32_t device_extension_count = 0;
+ VkResult err = vkEnumerateDeviceExtensionProperties(gpu, nullptr, &device_extension_count, nullptr);
+ ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
+ ERR_FAIL_COND_V_MSG(device_extension_count == 0, ERR_CANT_CREATE,
+ "vkEnumerateDeviceExtensionProperties failed to find any extensions\n\n"
+ "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
+ "vkCreateDevice failure");
+
+ VkExtensionProperties *device_extensions = (VkExtensionProperties *)malloc(sizeof(VkExtensionProperties) * device_extension_count);
+ err = vkEnumerateDeviceExtensionProperties(gpu, nullptr, &device_extension_count, device_extensions);
+ if (err) {
+ free(device_extensions);
+ ERR_FAIL_V(ERR_CANT_CREATE);
+ }
+
+#ifdef DEV_ENABLED
+ for (uint32_t i = 0; i < device_extension_count; i++) {
+ print_verbose(String("VULKAN: Found device extension ") + String(device_extensions[i].extensionName));
+ }
+#endif
+
+ // Enable all extensions that are supported and requested.
+ for (uint32_t i = 0; i < device_extension_count; i++) {
+ CharString extension_name(device_extensions[i].extensionName);
+ if (requested_device_extensions.has(extension_name)) {
+ enabled_device_extension_names.insert(extension_name);
+ }
+ }
+
+ // Now check our requested extensions.
+ for (KeyValue<CharString, bool> &requested_extension : requested_device_extensions) {
+ if (!enabled_device_extension_names.has(requested_extension.key)) {
+ if (requested_extension.value) {
+ free(device_extensions);
+ ERR_FAIL_V_MSG(ERR_BUG,
+ String("vkEnumerateDeviceExtensionProperties failed to find the ") + String(requested_extension.key) + String(" extension.\n\nDo you have a compatible Vulkan installable client driver (ICD) installed?\nvkCreateDevice failure"));
+ } else {
+ print_verbose(String("Optional extension ") + String(requested_extension.key) + String(" not found"));
+ }
+ }
}
- ERR_FAIL_COND_V_MSG(!surfaceExtFound, ERR_CANT_CREATE, "No surface extension found, is a driver installed?");
- ERR_FAIL_COND_V_MSG(!platformSurfaceExtFound, ERR_CANT_CREATE, "No platform surface extension found, is a driver installed?");
+ free(device_extensions);
+ device_extensions_initialized = true;
return OK;
}
@@ -521,6 +724,9 @@ Error VulkanContext::_check_capabilities() {
vrs_capabilities.pipeline_vrs_supported = false;
vrs_capabilities.primitive_vrs_supported = false;
vrs_capabilities.attachment_vrs_supported = false;
+ vrs_capabilities.min_texel_size = Size2i();
+ vrs_capabilities.max_texel_size = Size2i();
+ vrs_capabilities.texel_size = Size2i();
multiview_capabilities.is_supported = false;
multiview_capabilities.geometry_shader_is_supported = false;
multiview_capabilities.tessellation_shader_is_supported = false;
@@ -537,155 +743,176 @@ Error VulkanContext::_check_capabilities() {
storage_buffer_capabilities.storage_push_constant_16_is_supported = false;
storage_buffer_capabilities.storage_input_output_16 = false;
- // Check for extended features.
- PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2_func = (PFN_vkGetPhysicalDeviceFeatures2)vkGetInstanceProcAddr(inst, "vkGetPhysicalDeviceFeatures2");
- if (vkGetPhysicalDeviceFeatures2_func == nullptr) {
- // In Vulkan 1.0 might be accessible under its original extension name.
- vkGetPhysicalDeviceFeatures2_func = (PFN_vkGetPhysicalDeviceFeatures2)vkGetInstanceProcAddr(inst, "vkGetPhysicalDeviceFeatures2KHR");
- }
- if (vkGetPhysicalDeviceFeatures2_func != nullptr) {
- // Check our extended features.
- VkPhysicalDeviceFragmentShadingRateFeaturesKHR vrs_features = {
- /*sType*/ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR,
- /*pNext*/ nullptr,
- /*pipelineFragmentShadingRate*/ false,
- /*primitiveFragmentShadingRate*/ false,
- /*attachmentFragmentShadingRate*/ false,
- };
-
- VkPhysicalDeviceShaderFloat16Int8FeaturesKHR shader_features = {
- /*sType*/ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR,
- /*pNext*/ &vrs_features,
- /*shaderFloat16*/ false,
- /*shaderInt8*/ false,
- };
+ if (is_instance_extension_enabled(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ // Check for extended features.
+ PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2_func = (PFN_vkGetPhysicalDeviceFeatures2)vkGetInstanceProcAddr(inst, "vkGetPhysicalDeviceFeatures2");
+ if (vkGetPhysicalDeviceFeatures2_func == nullptr) {
+ // In Vulkan 1.0 it might be accessible under its original extension name.
+ vkGetPhysicalDeviceFeatures2_func = (PFN_vkGetPhysicalDeviceFeatures2)vkGetInstanceProcAddr(inst, "vkGetPhysicalDeviceFeatures2KHR");
+ }
+ if (vkGetPhysicalDeviceFeatures2_func != nullptr) {
+ // Check our extended features.
+ VkPhysicalDeviceFragmentShadingRateFeaturesKHR vrs_features = {
+ /*sType*/ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR,
+ /*pNext*/ nullptr,
+ /*pipelineFragmentShadingRate*/ false,
+ /*primitiveFragmentShadingRate*/ false,
+ /*attachmentFragmentShadingRate*/ false,
+ };
- VkPhysicalDevice16BitStorageFeaturesKHR storage_feature = {
- /*sType*/ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR,
- /*pNext*/ &shader_features,
- /*storageBuffer16BitAccess*/ false,
- /*uniformAndStorageBuffer16BitAccess*/ false,
- /*storagePushConstant16*/ false,
- /*storageInputOutput16*/ false,
- };
+ VkPhysicalDeviceShaderFloat16Int8FeaturesKHR shader_features = {
+ /*sType*/ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR,
+ /*pNext*/ &vrs_features,
+ /*shaderFloat16*/ false,
+ /*shaderInt8*/ false,
+ };
- VkPhysicalDeviceMultiviewFeatures multiview_features = {
- /*sType*/ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
- /*pNext*/ &storage_feature,
- /*multiview*/ false,
- /*multiviewGeometryShader*/ false,
- /*multiviewTessellationShader*/ false,
- };
+ VkPhysicalDevice16BitStorageFeaturesKHR storage_feature = {
+ /*sType*/ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR,
+ /*pNext*/ &shader_features,
+ /*storageBuffer16BitAccess*/ false,
+ /*uniformAndStorageBuffer16BitAccess*/ false,
+ /*storagePushConstant16*/ false,
+ /*storageInputOutput16*/ false,
+ };
- VkPhysicalDeviceFeatures2 device_features;
- device_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
- device_features.pNext = &multiview_features;
+ VkPhysicalDeviceMultiviewFeatures multiview_features = {
+ /*sType*/ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
+ /*pNext*/ &storage_feature,
+ /*multiview*/ false,
+ /*multiviewGeometryShader*/ false,
+ /*multiviewTessellationShader*/ false,
+ };
- vkGetPhysicalDeviceFeatures2_func(gpu, &device_features);
+ VkPhysicalDeviceFeatures2 device_features;
+ device_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+ device_features.pNext = &multiview_features;
- vrs_capabilities.pipeline_vrs_supported = vrs_features.pipelineFragmentShadingRate;
- vrs_capabilities.primitive_vrs_supported = vrs_features.primitiveFragmentShadingRate;
- vrs_capabilities.attachment_vrs_supported = vrs_features.attachmentFragmentShadingRate;
+ vkGetPhysicalDeviceFeatures2_func(gpu, &device_features);
- multiview_capabilities.is_supported = multiview_features.multiview;
- multiview_capabilities.geometry_shader_is_supported = multiview_features.multiviewGeometryShader;
- multiview_capabilities.tessellation_shader_is_supported = multiview_features.multiviewTessellationShader;
+ // We must check that the corresponding extension is present before assuming a
+ // feature is enabled. Strictly speaking, according to the spec we shouldn't
+ // add the structs to pNext at all, but in practice this works fine.
+ // See also: https://github.com/godotengine/godot/issues/65409
+ if (is_device_extension_enabled(VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME)) {
+ vrs_capabilities.pipeline_vrs_supported = vrs_features.pipelineFragmentShadingRate;
+ vrs_capabilities.primitive_vrs_supported = vrs_features.primitiveFragmentShadingRate;
+ vrs_capabilities.attachment_vrs_supported = vrs_features.attachmentFragmentShadingRate;
+ }
- shader_capabilities.shader_float16_is_supported = shader_features.shaderFloat16;
- shader_capabilities.shader_int8_is_supported = shader_features.shaderInt8;
+ if (is_device_extension_enabled(VK_KHR_MULTIVIEW_EXTENSION_NAME)) {
+ multiview_capabilities.is_supported = multiview_features.multiview;
+ multiview_capabilities.geometry_shader_is_supported = multiview_features.multiviewGeometryShader;
+ multiview_capabilities.tessellation_shader_is_supported = multiview_features.multiviewTessellationShader;
+ }
- storage_buffer_capabilities.storage_buffer_16_bit_access_is_supported = storage_feature.storageBuffer16BitAccess;
- storage_buffer_capabilities.uniform_and_storage_buffer_16_bit_access_is_supported = storage_feature.uniformAndStorageBuffer16BitAccess;
- storage_buffer_capabilities.storage_push_constant_16_is_supported = storage_feature.storagePushConstant16;
- storage_buffer_capabilities.storage_input_output_16 = storage_feature.storageInputOutput16;
- }
+ if (is_device_extension_enabled(VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME)) {
+ shader_capabilities.shader_float16_is_supported = shader_features.shaderFloat16;
+ shader_capabilities.shader_int8_is_supported = shader_features.shaderInt8;
+ }
- // Check extended properties.
- PFN_vkGetPhysicalDeviceProperties2 device_properties_func = (PFN_vkGetPhysicalDeviceProperties2)vkGetInstanceProcAddr(inst, "vkGetPhysicalDeviceProperties2");
- if (device_properties_func == nullptr) {
- // In Vulkan 1.0 might be accessible under its original extension name.
- device_properties_func = (PFN_vkGetPhysicalDeviceProperties2)vkGetInstanceProcAddr(inst, "vkGetPhysicalDeviceProperties2KHR");
- }
- if (device_properties_func != nullptr) {
- VkPhysicalDeviceFragmentShadingRatePropertiesKHR vrsProperties{};
- VkPhysicalDeviceMultiviewProperties multiviewProperties{};
- VkPhysicalDeviceSubgroupProperties subgroupProperties{};
- VkPhysicalDeviceProperties2 physicalDeviceProperties{};
- void *nextptr = nullptr;
+ if (is_device_extension_enabled(VK_KHR_16BIT_STORAGE_EXTENSION_NAME)) {
+ storage_buffer_capabilities.storage_buffer_16_bit_access_is_supported = storage_feature.storageBuffer16BitAccess;
+ storage_buffer_capabilities.uniform_and_storage_buffer_16_bit_access_is_supported = storage_feature.uniformAndStorageBuffer16BitAccess;
+ storage_buffer_capabilities.storage_push_constant_16_is_supported = storage_feature.storagePushConstant16;
+ storage_buffer_capabilities.storage_input_output_16 = storage_feature.storageInputOutput16;
+ }
+ }
- subgroupProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES;
- subgroupProperties.pNext = nextptr;
- nextptr = &subgroupProperties;
+ // Check extended properties.
+ PFN_vkGetPhysicalDeviceProperties2 device_properties_func = (PFN_vkGetPhysicalDeviceProperties2)vkGetInstanceProcAddr(inst, "vkGetPhysicalDeviceProperties2");
+ if (device_properties_func == nullptr) {
+ // In Vulkan 1.0 it might be accessible under its original extension name.
+ device_properties_func = (PFN_vkGetPhysicalDeviceProperties2)vkGetInstanceProcAddr(inst, "vkGetPhysicalDeviceProperties2KHR"); + } + if (device_properties_func != nullptr) { + VkPhysicalDeviceFragmentShadingRatePropertiesKHR vrsProperties{}; + VkPhysicalDeviceMultiviewProperties multiviewProperties{}; + VkPhysicalDeviceSubgroupProperties subgroupProperties{}; + VkPhysicalDeviceProperties2 physicalDeviceProperties{}; + void *nextptr = nullptr; + + if (!(vulkan_major == 1 && vulkan_minor == 0)) { + subgroupProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES; + subgroupProperties.pNext = nextptr; + + nextptr = &subgroupProperties; + } - if (multiview_capabilities.is_supported) { - multiviewProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES; - multiviewProperties.pNext = nextptr; + if (multiview_capabilities.is_supported) { + multiviewProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES; + multiviewProperties.pNext = nextptr; - nextptr = &multiviewProperties; - } + nextptr = &multiviewProperties; + } - if (vrs_capabilities.attachment_vrs_supported) { - vrsProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR; - vrsProperties.pNext = nextptr; + if (vrs_capabilities.attachment_vrs_supported) { + vrsProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR; + vrsProperties.pNext = nextptr; - nextptr = &vrsProperties; - } + nextptr = &vrsProperties; + } - physicalDeviceProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2; - physicalDeviceProperties.pNext = nextptr; + physicalDeviceProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2; + physicalDeviceProperties.pNext = nextptr; - device_properties_func(gpu, &physicalDeviceProperties); + device_properties_func(gpu, &physicalDeviceProperties); - subgroup_capabilities.size = subgroupProperties.subgroupSize; - subgroup_capabilities.supportedStages = subgroupProperties.supportedStages; - subgroup_capabilities.supportedOperations = subgroupProperties.supportedOperations; - // Note: quadOperationsInAllStages will be true if: - // - supportedStages has VK_SHADER_STAGE_ALL_GRAPHICS + VK_SHADER_STAGE_COMPUTE_BIT. - // - supportedOperations has VK_SUBGROUP_FEATURE_QUAD_BIT. - subgroup_capabilities.quadOperationsInAllStages = subgroupProperties.quadOperationsInAllStages; + subgroup_capabilities.size = subgroupProperties.subgroupSize; + subgroup_capabilities.supportedStages = subgroupProperties.supportedStages; + subgroup_capabilities.supportedOperations = subgroupProperties.supportedOperations; + // Note: quadOperationsInAllStages will be true if: + // - supportedStages has VK_SHADER_STAGE_ALL_GRAPHICS + VK_SHADER_STAGE_COMPUTE_BIT. + // - supportedOperations has VK_SUBGROUP_FEATURE_QUAD_BIT. + subgroup_capabilities.quadOperationsInAllStages = subgroupProperties.quadOperationsInAllStages; - if (vrs_capabilities.pipeline_vrs_supported || vrs_capabilities.primitive_vrs_supported || vrs_capabilities.attachment_vrs_supported) { - print_verbose("- Vulkan Variable Rate Shading supported:"); - if (vrs_capabilities.pipeline_vrs_supported) { - print_verbose(" Pipeline fragment shading rate"); - } - if (vrs_capabilities.primitive_vrs_supported) { - print_verbose(" Primitive fragment shading rate"); - } - if (vrs_capabilities.attachment_vrs_supported) { - // TODO expose these somehow to the end user. 
- vrs_capabilities.min_texel_size.x = vrsProperties.minFragmentShadingRateAttachmentTexelSize.width; - vrs_capabilities.min_texel_size.y = vrsProperties.minFragmentShadingRateAttachmentTexelSize.height; - vrs_capabilities.max_texel_size.x = vrsProperties.maxFragmentShadingRateAttachmentTexelSize.width; - vrs_capabilities.max_texel_size.y = vrsProperties.maxFragmentShadingRateAttachmentTexelSize.height; + if (vrs_capabilities.pipeline_vrs_supported || vrs_capabilities.primitive_vrs_supported || vrs_capabilities.attachment_vrs_supported) { + print_verbose("- Vulkan Variable Rate Shading supported:"); + if (vrs_capabilities.pipeline_vrs_supported) { + print_verbose(" Pipeline fragment shading rate"); + } + if (vrs_capabilities.primitive_vrs_supported) { + print_verbose(" Primitive fragment shading rate"); + } + if (vrs_capabilities.attachment_vrs_supported) { + // TODO expose these somehow to the end user. + vrs_capabilities.min_texel_size.x = vrsProperties.minFragmentShadingRateAttachmentTexelSize.width; + vrs_capabilities.min_texel_size.y = vrsProperties.minFragmentShadingRateAttachmentTexelSize.height; + vrs_capabilities.max_texel_size.x = vrsProperties.maxFragmentShadingRateAttachmentTexelSize.width; + vrs_capabilities.max_texel_size.y = vrsProperties.maxFragmentShadingRateAttachmentTexelSize.height; + + // We'll attempt to default to a texel size of 16x16 + vrs_capabilities.texel_size.x = CLAMP(16, vrs_capabilities.min_texel_size.x, vrs_capabilities.max_texel_size.x); + vrs_capabilities.texel_size.y = CLAMP(16, vrs_capabilities.min_texel_size.y, vrs_capabilities.max_texel_size.y); + + print_verbose(String(" Attachment fragment shading rate") + String(", min texel size: (") + itos(vrs_capabilities.min_texel_size.x) + String(", ") + itos(vrs_capabilities.min_texel_size.y) + String(")") + String(", max texel size: (") + itos(vrs_capabilities.max_texel_size.x) + String(", ") + itos(vrs_capabilities.max_texel_size.y) + String(")")); + } - print_verbose(String(" Attachment fragment shading rate") + String(", min texel size: (") + itos(vrs_capabilities.min_texel_size.x) + String(", ") + itos(vrs_capabilities.min_texel_size.y) + String(")") + String(", max texel size: (") + itos(vrs_capabilities.max_texel_size.x) + String(", ") + itos(vrs_capabilities.max_texel_size.y) + String(")")); + } else { + print_verbose("- Vulkan Variable Rate Shading not supported"); } - } else { - print_verbose("- Vulkan Variable Rate Shading not supported"); - } + if (multiview_capabilities.is_supported) { + multiview_capabilities.max_view_count = multiviewProperties.maxMultiviewViewCount; + multiview_capabilities.max_instance_count = multiviewProperties.maxMultiviewInstanceIndex; - if (multiview_capabilities.is_supported) { - multiview_capabilities.max_view_count = multiviewProperties.maxMultiviewViewCount; - multiview_capabilities.max_instance_count = multiviewProperties.maxMultiviewInstanceIndex; + print_verbose("- Vulkan multiview supported:"); + print_verbose(" max view count: " + itos(multiview_capabilities.max_view_count)); + print_verbose(" max instances: " + itos(multiview_capabilities.max_instance_count)); + } else { + print_verbose("- Vulkan multiview not supported"); + } - print_verbose("- Vulkan multiview supported:"); - print_verbose(" max view count: " + itos(multiview_capabilities.max_view_count)); - print_verbose(" max instances: " + itos(multiview_capabilities.max_instance_count)); + print_verbose("- Vulkan subgroup:"); + print_verbose(" size: " + itos(subgroup_capabilities.size)); + print_verbose(" 
stages: " + subgroup_capabilities.supported_stages_desc()); + print_verbose(" supported ops: " + subgroup_capabilities.supported_operations_desc()); + if (subgroup_capabilities.quadOperationsInAllStages) { + print_verbose(" quad operations in all stages"); + } } else { - print_verbose("- Vulkan multiview not supported"); + print_verbose("- Couldn't call vkGetPhysicalDeviceProperties2"); } - - print_verbose("- Vulkan subgroup:"); - print_verbose(" size: " + itos(subgroup_capabilities.size)); - print_verbose(" stages: " + subgroup_capabilities.supported_stages_desc()); - print_verbose(" supported ops: " + subgroup_capabilities.supported_operations_desc()); - if (subgroup_capabilities.quadOperationsInAllStages) { - print_verbose(" quad operations in all stages"); - } - } else { - print_verbose("- Couldn't call vkGetPhysicalDeviceProperties2"); } return OK; @@ -697,13 +924,20 @@ Error VulkanContext::_create_instance() { // Initialize extensions. { - Error err = _initialize_extensions(); + Error err = _initialize_instance_extensions(); if (err != OK) { return err; } } - CharString cs = ProjectSettings::get_singleton()->get("application/config/name").operator String().utf8(); + int enabled_extension_count = 0; + const char *enabled_extension_names[MAX_EXTENSIONS]; + ERR_FAIL_COND_V(enabled_instance_extension_names.size() > MAX_EXTENSIONS, ERR_CANT_CREATE); + for (const CharString &extension_name : enabled_instance_extension_names) { + enabled_extension_names[enabled_extension_count++] = extension_name.ptr(); + } + + CharString cs = GLOBAL_GET("application/config/name").operator String().utf8(); const VkApplicationInfo app = { /*sType*/ VK_STRUCTURE_TYPE_APPLICATION_INFO, /*pNext*/ nullptr, @@ -717,7 +951,7 @@ Error VulkanContext::_create_instance() { inst_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO; inst_info.pApplicationInfo = &app; inst_info.enabledExtensionCount = enabled_extension_count; - inst_info.ppEnabledExtensionNames = (const char *const *)extension_names; + inst_info.ppEnabledExtensionNames = (const char *const *)enabled_extension_names; if (_use_validation_layers()) { _get_preferred_validation_layers(&inst_info.enabledLayerCount, &inst_info.ppEnabledLayerNames); } @@ -727,9 +961,9 @@ Error VulkanContext::_create_instance() { * After the instance is created, we use the instance-based * function to register the final callback. */ - VkDebugUtilsMessengerCreateInfoEXT dbg_messenger_create_info; - VkDebugReportCallbackCreateInfoEXT dbg_report_callback_create_info{}; - if (enabled_debug_utils) { + VkDebugUtilsMessengerCreateInfoEXT dbg_messenger_create_info = {}; + VkDebugReportCallbackCreateInfoEXT dbg_report_callback_create_info = {}; + if (is_instance_extension_enabled(VK_EXT_DEBUG_UTILS_EXTENSION_NAME)) { // VK_EXT_debug_utils style. 
dbg_messenger_create_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT; dbg_messenger_create_info.pNext = nullptr; @@ -742,7 +976,7 @@ Error VulkanContext::_create_instance() { dbg_messenger_create_info.pfnUserCallback = _debug_messenger_callback; dbg_messenger_create_info.pUserData = this; inst_info.pNext = &dbg_messenger_create_info; - } else if (enabled_debug_report) { + } else if (is_instance_extension_enabled(VK_EXT_DEBUG_REPORT_EXTENSION_NAME)) { dbg_report_callback_create_info.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT; dbg_report_callback_create_info.flags = VK_DEBUG_REPORT_INFORMATION_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT | @@ -782,7 +1016,7 @@ Error VulkanContext::_create_instance() { volkLoadInstance(inst); #endif - if (enabled_debug_utils) { + if (is_instance_extension_enabled(VK_EXT_DEBUG_UTILS_EXTENSION_NAME)) { // Setup VK_EXT_debug_utils function pointers always (we use them for debug labels and names). CreateDebugUtilsMessengerEXT = (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(inst, "vkCreateDebugUtilsMessengerEXT"); @@ -823,7 +1057,7 @@ Error VulkanContext::_create_instance() { ERR_FAIL_V(ERR_CANT_CREATE); break; } - } else if (enabled_debug_report) { + } else if (is_instance_extension_enabled(VK_EXT_DEBUG_REPORT_EXTENSION_NAME)) { CreateDebugReportCallbackEXT = (PFN_vkCreateDebugReportCallbackEXT)vkGetInstanceProcAddr(inst, "vkCreateDebugReportCallbackEXT"); DebugReportMessageEXT = (PFN_vkDebugReportMessageEXT)vkGetInstanceProcAddr(inst, "vkDebugReportMessageEXT"); DestroyDebugReportCallbackEXT = (PFN_vkDestroyDebugReportCallbackEXT)vkGetInstanceProcAddr(inst, "vkDestroyDebugReportCallbackEXT"); @@ -1004,12 +1238,6 @@ Error VulkanContext::_create_physical_device(VkSurfaceKHR p_surface) { free(physical_devices); - // Look for device extensions. - uint32_t device_extension_count = 0; - VkBool32 swapchainExtFound = 0; - enabled_extension_count = 0; - memset(extension_names, 0, sizeof(extension_names)); - // Get identifier properties. vkGetPhysicalDeviceProperties(gpu, &gpu_props); @@ -1035,83 +1263,13 @@ Error VulkanContext::_create_physical_device(VkSurfaceKHR p_surface) { device_api_version = gpu_props.apiVersion; - err = vkEnumerateDeviceExtensionProperties(gpu, nullptr, &device_extension_count, nullptr); - ERR_FAIL_COND_V(err, ERR_CANT_CREATE); - - if (device_extension_count > 0) { - VkExtensionProperties *device_extensions = (VkExtensionProperties *)malloc(sizeof(VkExtensionProperties) * device_extension_count); - err = vkEnumerateDeviceExtensionProperties(gpu, nullptr, &device_extension_count, device_extensions); - if (err) { - free(device_extensions); - ERR_FAIL_V(ERR_CANT_CREATE); - } - - for (uint32_t i = 0; i < device_extension_count; i++) { - if (!strcmp(VK_KHR_SWAPCHAIN_EXTENSION_NAME, device_extensions[i].extensionName)) { - swapchainExtFound = 1; - extension_names[enabled_extension_count++] = VK_KHR_SWAPCHAIN_EXTENSION_NAME; - } - if (!strcmp(VK_KHR_MULTIVIEW_EXTENSION_NAME, device_extensions[i].extensionName)) { - // If multiview is supported, enable it. 
- extension_names[enabled_extension_count++] = VK_KHR_MULTIVIEW_EXTENSION_NAME; - } - if (!strcmp(VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME, device_extensions[i].extensionName)) { - // if shading rate image is supported, enable it - extension_names[enabled_extension_count++] = VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME; - } - if (!strcmp(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME, device_extensions[i].extensionName)) { - extension_names[enabled_extension_count++] = VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME; - } - if (enabled_extension_count >= MAX_EXTENSIONS) { - free(device_extensions); - ERR_FAIL_V_MSG(ERR_BUG, "Enabled extension count reaches MAX_EXTENSIONS, BUG"); - } - } - - if (VK_KHR_incremental_present_enabled) { - // Even though the user "enabled" the extension via the command - // line, we must make sure that it's enumerated for use with the - // device. Therefore, disable it here, and re-enable it again if - // enumerated. - VK_KHR_incremental_present_enabled = false; - for (uint32_t i = 0; i < device_extension_count; i++) { - if (!strcmp(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME, device_extensions[i].extensionName)) { - extension_names[enabled_extension_count++] = VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME; - VK_KHR_incremental_present_enabled = true; - } - if (enabled_extension_count >= MAX_EXTENSIONS) { - free(device_extensions); - ERR_FAIL_V_MSG(ERR_BUG, "Enabled extension count reaches MAX_EXTENSIONS, BUG"); - } - } - } - - if (VK_GOOGLE_display_timing_enabled) { - // Even though the user "enabled" the extension via the command - // line, we must make sure that it's enumerated for use with the - // device. Therefore, disable it here, and re-enable it again if - // enumerated. - VK_GOOGLE_display_timing_enabled = false; - for (uint32_t i = 0; i < device_extension_count; i++) { - if (!strcmp(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME, device_extensions[i].extensionName)) { - extension_names[enabled_extension_count++] = VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME; - VK_GOOGLE_display_timing_enabled = true; - } - if (enabled_extension_count >= MAX_EXTENSIONS) { - free(device_extensions); - ERR_FAIL_V_MSG(ERR_BUG, "Enabled extension count reaches MAX_EXTENSIONS, BUG"); - } - } + { + Error _err = _initialize_device_extensions(); + if (_err != OK) { + return _err; } - - free(device_extensions); } - ERR_FAIL_COND_V_MSG(!swapchainExtFound, ERR_CANT_CREATE, - "vkEnumerateDeviceExtensionProperties failed to find the " VK_KHR_SWAPCHAIN_EXTENSION_NAME - " extension.\n\nDo you have a compatible Vulkan installable client driver (ICD) installed?\n" - "vkCreateInstance Failure"); - // Call with nullptr data to get count. vkGetPhysicalDeviceQueueFamilyProperties(gpu, &queue_family_count, nullptr); ERR_FAIL_COND_V(queue_family_count == 0, ERR_CANT_CREATE); @@ -1172,7 +1330,7 @@ Error VulkanContext::_create_device() { }; nextptr = &shader_features; - VkPhysicalDeviceFragmentShadingRateFeaturesKHR vrs_features; + VkPhysicalDeviceFragmentShadingRateFeaturesKHR vrs_features = {}; if (vrs_capabilities.pipeline_vrs_supported || vrs_capabilities.primitive_vrs_supported || vrs_capabilities.attachment_vrs_supported) { // Insert into our chain to enable these features if they are available. 
vrs_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR; @@ -1184,9 +1342,9 @@ Error VulkanContext::_create_device() { nextptr = &vrs_features; } - VkPhysicalDeviceVulkan11Features vulkan11features; - VkPhysicalDevice16BitStorageFeaturesKHR storage_feature; - VkPhysicalDeviceMultiviewFeatures multiview_features; + VkPhysicalDeviceVulkan11Features vulkan11features = {}; + VkPhysicalDevice16BitStorageFeaturesKHR storage_feature = {}; + VkPhysicalDeviceMultiviewFeatures multiview_features = {}; if (vulkan_major > 1 || vulkan_minor >= 2) { // In Vulkan 1.2 and newer we use a newer struct to enable various features. @@ -1206,7 +1364,7 @@ Error VulkanContext::_create_device() { vulkan11features.shaderDrawParameters = 0; nextptr = &vulkan11features; } else { - // On Vulkan 1.0 and 1.1 we use our older structs to initialise these features. + // On Vulkan 1.0 and 1.1 we use our older structs to initialize these features. storage_feature.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR; storage_feature.pNext = nextptr; storage_feature.storageBuffer16BitAccess = storage_buffer_capabilities.storage_buffer_16_bit_access_is_supported; @@ -1225,6 +1383,13 @@ Error VulkanContext::_create_device() { } } + uint32_t enabled_extension_count = 0; + const char *enabled_extension_names[MAX_EXTENSIONS]; + ERR_FAIL_COND_V(enabled_device_extension_names.size() > MAX_EXTENSIONS, ERR_CANT_CREATE); + for (const CharString &extension_name : enabled_device_extension_names) { + enabled_extension_names[enabled_extension_count++] = extension_name.ptr(); + } + VkDeviceCreateInfo sdevice = { /*sType*/ VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, /*pNext*/ nextptr, @@ -1234,7 +1399,7 @@ Error VulkanContext::_create_device() { /*enabledLayerCount*/ 0, /*ppEnabledLayerNames*/ nullptr, /*enabledExtensionCount*/ enabled_extension_count, - /*ppEnabledExtensionNames*/ (const char *const *)extension_names, + /*ppEnabledExtensionNames*/ (const char *const *)enabled_extension_names, /*pEnabledFeatures*/ &physical_device_features, // If specific features are required, pass them in here. }; if (separate_present_queue) { @@ -1322,7 +1487,7 @@ Error VulkanContext::_initialize_queues(VkSurfaceKHR p_surface) { GET_DEVICE_PROC_ADDR(device, GetSwapchainImagesKHR); GET_DEVICE_PROC_ADDR(device, AcquireNextImageKHR); GET_DEVICE_PROC_ADDR(device, QueuePresentKHR); - if (VK_GOOGLE_display_timing_enabled) { + if (is_device_extension_enabled(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME)) { GET_DEVICE_PROC_ADDR(device, GetRefreshCycleDurationGOOGLE); GET_DEVICE_PROC_ADDR(device, GetPastPresentationTimingGOOGLE); } @@ -1353,7 +1518,7 @@ Error VulkanContext::_initialize_queues(VkSurfaceKHR p_surface) { color_space = surfFormats[0].colorSpace; } else { // These should be ordered with the ones we want to use on top and fallback modes further down - // we want a 32bit RGBA unsigned normalised buffer or similar. + // we want a 32bit RGBA unsigned normalized buffer or similar. const VkFormat allowed_formats[] = { VK_FORMAT_B8G8R8A8_UNORM, VK_FORMAT_R8G8B8A8_UNORM @@ -1702,18 +1867,22 @@ Error VulkanContext::_update_swap_chain(Window *window) { preTransform = surfCapabilities.currentTransform; } - // Find a supported composite alpha mode - one of these is guaranteed to be set. 
VkCompositeAlphaFlagBitsKHR compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
- VkCompositeAlphaFlagBitsKHR compositeAlphaFlags[4] = {
- VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
- VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
- VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR,
- VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
- };
- for (uint32_t i = 0; i < ARRAY_SIZE(compositeAlphaFlags); i++) {
- if (surfCapabilities.supportedCompositeAlpha & compositeAlphaFlags[i]) {
- compositeAlpha = compositeAlphaFlags[i];
- break;
+
+ if (OS::get_singleton()->is_layered_allowed() || !(surfCapabilities.supportedCompositeAlpha & compositeAlpha)) {
+ // Find a supported composite alpha mode - one of these is guaranteed to be set.
+ VkCompositeAlphaFlagBitsKHR compositeAlphaFlags[4] = {
+ VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
+ VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
+ VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR,
+ VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
+ };
+
+ for (uint32_t i = 0; i < ARRAY_SIZE(compositeAlphaFlags); i++) {
+ if (surfCapabilities.supportedCompositeAlpha & compositeAlphaFlags[i]) {
+ compositeAlpha = compositeAlphaFlags[i];
+ break;
+ }
}
}
@@ -2073,7 +2242,7 @@ Error VulkanContext::swap_buffers() {
VkResult err;
#if 0
- if (VK_GOOGLE_display_timing_enabled) {
+ if (is_device_extension_enabled(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME)) {
// Look at what happened to previous presents, and make appropriate
// adjustments in timing.
DemoUpdateTargetIPD(demo);
@@ -2194,7 +2363,7 @@ Error VulkanContext::swap_buffers() {
}
#if 0
- if (VK_KHR_incremental_present_enabled) {
+ if (is_device_extension_enabled(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME)) {
// If using VK_KHR_incremental_present, we provide a hint of the region
// that contains changed content relative to the previously-presented
// image. The implementation can use this hint in order to save
@@ -2225,7 +2394,7 @@ Error VulkanContext::swap_buffers() {
#endif
#if 0
- if (VK_GOOGLE_display_timing_enabled) {
+ if (is_device_extension_enabled(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME)) {
VkPresentTimeGOOGLE ptime;
if (prev_desired_present_time == 0) {
// This must be the first present for this swapchain.
@@ -2255,7 +2424,7 @@ Error VulkanContext::swap_buffers() {
/*swapchainCount*/ present.swapchainCount,
/*pTimes*/ &ptime,
};
- if (VK_GOOGLE_display_timing_enabled) {
+ if (is_device_extension_enabled(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME)) {
present.pNext = &present_time;
}
}
@@ -2328,6 +2497,13 @@ RID VulkanContext::local_device_create() {
queues[0].pQueuePriorities = queue_priorities;
queues[0].flags = 0;
+ uint32_t enabled_extension_count = 0;
+ const char *enabled_extension_names[MAX_EXTENSIONS];
+ ERR_FAIL_COND_V(enabled_device_extension_names.size() > MAX_EXTENSIONS, RID());
+ for (const CharString &extension_name : enabled_device_extension_names) {
+ enabled_extension_names[enabled_extension_count++] = extension_name.ptr();
+ }
+
VkDeviceCreateInfo sdevice = {
/*sType =*/VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
/*pNext */ nullptr,
@@ -2337,7 +2513,7 @@ RID VulkanContext::local_device_create() {
/*enabledLayerCount */ 0,
/*ppEnabledLayerNames */ nullptr,
/*enabledExtensionCount */ enabled_extension_count,
- /*ppEnabledExtensionNames */ (const char *const *)extension_names,
+ /*ppEnabledExtensionNames */ (const char *const *)enabled_extension_names,
/*pEnabledFeatures */ &physical_device_features, // If specific features are required, pass them in here.
}; err = vkCreateDevice(gpu, &sdevice, nullptr, &ld.device); @@ -2402,7 +2578,7 @@ void VulkanContext::local_device_free(RID p_local_device) { } void VulkanContext::command_begin_label(VkCommandBuffer p_command_buffer, String p_label_name, const Color p_color) { - if (!enabled_debug_utils) { + if (!is_instance_extension_enabled(VK_EXT_DEBUG_UTILS_EXTENSION_NAME)) { return; } @@ -2419,7 +2595,7 @@ void VulkanContext::command_begin_label(VkCommandBuffer p_command_buffer, String } void VulkanContext::command_insert_label(VkCommandBuffer p_command_buffer, String p_label_name, const Color p_color) { - if (!enabled_debug_utils) { + if (!is_instance_extension_enabled(VK_EXT_DEBUG_UTILS_EXTENSION_NAME)) { return; } CharString cs = p_label_name.utf8(); @@ -2435,14 +2611,14 @@ void VulkanContext::command_insert_label(VkCommandBuffer p_command_buffer, Strin } void VulkanContext::command_end_label(VkCommandBuffer p_command_buffer) { - if (!enabled_debug_utils) { + if (!is_instance_extension_enabled(VK_EXT_DEBUG_UTILS_EXTENSION_NAME)) { return; } CmdEndDebugUtilsLabelEXT(p_command_buffer); } void VulkanContext::set_object_name(VkObjectType p_object_type, uint64_t p_object_handle, String p_object_name) { - if (!enabled_debug_utils) { + if (!is_instance_extension_enabled(VK_EXT_DEBUG_UTILS_EXTENSION_NAME)) { return; } CharString obj_data = p_object_name.utf8(); @@ -2503,7 +2679,7 @@ VulkanContext::~VulkanContext() { vkDestroySemaphore(device, image_ownership_semaphores[i], nullptr); } } - if (inst_initialized && enabled_debug_utils) { + if (inst_initialized && is_instance_extension_enabled(VK_EXT_DEBUG_UTILS_EXTENSION_NAME)) { DestroyDebugUtilsMessengerEXT(inst, dbg_messenger, nullptr); } if (inst_initialized && dbg_debug_report != VK_NULL_HANDLE) { diff --git a/drivers/vulkan/vulkan_context.h b/drivers/vulkan/vulkan_context.h index 9889cf336b..0d49f5fe9f 100644 --- a/drivers/vulkan/vulkan_context.h +++ b/drivers/vulkan/vulkan_context.h @@ -34,6 +34,7 @@ #include "core/error/error_list.h" #include "core/os/mutex.h" #include "core/string/ustring.h" +#include "core/templates/hash_map.h" #include "core/templates/rb_map.h" #include "core/templates/rid_owner.h" #include "servers/display_server.h" @@ -76,6 +77,8 @@ public: Size2i min_texel_size; Size2i max_texel_size; + + Size2i texel_size; // The texel size we'll use }; struct ShaderCapabilities { @@ -182,18 +185,15 @@ private: int command_buffer_count = 1; // Extensions. + static bool instance_extensions_initialized; + static HashMap<CharString, bool> requested_instance_extensions; + HashSet<CharString> enabled_instance_extension_names; + static bool device_extensions_initialized; + static HashMap<CharString, bool> requested_device_extensions; + HashSet<CharString> enabled_device_extension_names; bool VK_KHR_incremental_present_enabled = true; bool VK_GOOGLE_display_timing_enabled = true; - uint32_t enabled_extension_count = 0; - const char *extension_names[MAX_EXTENSIONS]; - bool enabled_debug_utils = false; - - /** - * True if VK_EXT_debug_report extension is used. VK_EXT_debug_report is deprecated but it is - * still used if VK_EXT_debug_utils is not available. 
- */
- bool enabled_debug_report = false;
PFN_vkCreateDebugUtilsMessengerEXT CreateDebugUtilsMessengerEXT = nullptr;
PFN_vkDestroyDebugUtilsMessengerEXT DestroyDebugUtilsMessengerEXT = nullptr;
@@ -222,7 +222,8 @@ private:
VkDebugReportCallbackEXT dbg_debug_report = VK_NULL_HANDLE;
Error _obtain_vulkan_version();
- Error _initialize_extensions();
+ Error _initialize_instance_extensions();
+ Error _initialize_device_extensions();
Error _check_capabilities();
VkBool32 _check_layers(uint32_t check_count, const char *const *check_names, uint32_t layer_count, VkLayerProperties *layers);
@@ -257,6 +258,8 @@ private:
Error _create_swap_chain();
Error _create_semaphores();
+ Vector<VkAttachmentReference> _convert_VkAttachmentReference2(uint32_t p_count, const VkAttachmentReference2 *p_refs);
+
protected:
virtual const char *_get_platform_surface_extension() const = 0;
@@ -270,7 +273,8 @@ protected:
public:
// Extension calls.
- VkResult vkCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass);
+ bool supports_renderpass2() const { return is_device_extension_enabled(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME); }
+ VkResult vkCreateRenderPass2KHR(VkDevice p_device, const VkRenderPassCreateInfo2 *p_create_info, const VkAllocationCallbacks *p_allocator, VkRenderPass *p_render_pass);
uint32_t get_vulkan_major() const { return vulkan_major; };
uint32_t get_vulkan_minor() const { return vulkan_minor; };
@@ -289,6 +293,16 @@ public:
static void set_vulkan_hooks(VulkanHooks *p_vulkan_hooks) { vulkan_hooks = p_vulkan_hooks; };
+ static void register_requested_instance_extension(const CharString &extension_name, bool p_required);
+ bool is_instance_extension_enabled(const CharString &extension_name) const {
+ return enabled_instance_extension_names.has(extension_name);
+ }
+
+ static void register_requested_device_extension(const CharString &extension_name, bool p_required);
+ bool is_device_extension_enabled(const CharString &extension_name) const {
+ return enabled_device_extension_names.has(extension_name);
+ }
+
void window_resize(DisplayServer::WindowID p_window_id, int p_width, int p_height);
int window_get_width(DisplayServer::WindowID p_window = 0);
int window_get_height(DisplayServer::WindowID p_window = 0);
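For illustration, a minimal sketch of how a caller might use the device-extension registration API added by this commit. The call site is hypothetical, and VK_EXT_hdr_metadata is picked only because it appears on the TODO list in _initialize_device_extensions(); nothing below is part of the commit itself.

#include "drivers/vulkan/vulkan_context.h"

// Hypothetical call site: must run before VulkanContext creates the VkDevice,
// and each extension may be registered only once (re-registering is an error).
void register_platform_vulkan_extensions() {
	// Optional request (p_required == false): if the driver does not expose
	// the extension, initialization just logs "Optional extension ... not found".
	VulkanContext::register_requested_device_extension(VK_EXT_HDR_METADATA_EXTENSION_NAME, false);
	// A required request (p_required == true) would instead abort device
	// setup with ERR_BUG when the extension is missing.
}

// Once the device exists, feature code branches on the outcome:
//     if (context->is_device_extension_enabled(VK_EXT_HDR_METADATA_EXTENSION_NAME)) { ... }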
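The `= {}` initializers added in _create_device() and the capability queries in _check_capabilities() both rely on the same pNext idiom: zero-initialize every extension struct, link the optional ones into a singly linked chain, hang the chain off the base struct, and make one query call. Below is a standalone sketch of that idiom, assuming a Vulkan 1.1+ instance so vkGetPhysicalDeviceFeatures2 can be called directly (the engine instead resolves it through vkGetInstanceProcAddr so the KHR-suffixed Vulkan 1.0 variant also works).

#include <vulkan/vulkan.h>

void query_features(VkPhysicalDevice gpu, bool want_multiview) {
	void *nextptr = nullptr;

	// Zero-initialize up front: a struct that ends up not being chained must
	// still hold harmless values, and sType/pNext may never be garbage.
	VkPhysicalDeviceMultiviewFeatures multiview_features = {};
	if (want_multiview) {
		multiview_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES;
		multiview_features.pNext = nextptr; // Link in the chain built so far.
		nextptr = &multiview_features; // This struct becomes the new head.
	}

	VkPhysicalDeviceFeatures2 device_features = {};
	device_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
	device_features.pNext = nextptr; // The whole chain hangs off the base struct.

	vkGetPhysicalDeviceFeatures2(gpu, &device_features);

	// multiview_features.multiview now holds the driver's answer; as the
	// comment in the diff notes, trust it only when the matching device
	// extension was actually enumerated.
}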
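Finally, the default VRS texel size computed in _check_capabilities() is simply the preferred 16x16 size clamped into the driver-reported range. With made-up driver limits:

// CLAMP(value, min, max) limits value to [min, max], so with driver limits
// min_texel_size = (8, 8) and max_texel_size = (32, 32):
//     texel_size = (CLAMP(16, 8, 32), CLAMP(16, 8, 32)) = (16, 16)
// and with a restrictive max_texel_size = (8, 8):
//     texel_size = (CLAMP(16, 8, 8), CLAMP(16, 8, 8)) = (8, 8)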